diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 13580e73007..2f103f53b79 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -11,6 +11,7 @@ env: ELASTICSEARCH6_ARCHIVE: elasticsearch-6.3.1.tar.gz OSF_DB_PORT: 5432 OSF_DB_PASSWORD: postgres + GITHUB_ACTIONS: true jobs: build-cache: @@ -167,3 +168,36 @@ jobs: - name: Upload report if: (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report + + mailhog: + runs-on: ubuntu-22.04 + permissions: + checks: write + needs: build-cache + services: + postgres: + image: postgres + + env: + POSTGRES_PASSWORD: ${{ env.OSF_DB_PASSWORD }} + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + mailhog: + image: mailhog/mailhog + ports: + - 1025:1025 + - 8025:8025 + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/start-build + - name: Run tests + run: poetry run python3 -m invoke test-ci-mailhog -n 1 --junit + - name: Upload report + if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed + uses: ./.github/actions/gen-report diff --git a/README-docker-compose.md b/README-docker-compose.md index c3ff7aed542..e3987b6b7bd 100644 --- a/README-docker-compose.md +++ b/README-docker-compose.md @@ -271,10 +271,8 @@ docker compose run --rm web python3 -m scripts.parse_citation_styles ``` - Start ember_osf_web - - Needed for quickfiles feature: - ```bash - docker compose up -d ember_osf_web - ``` + - Needed for ember app: + - `docker-compose up -d ember_osf_web` - OPTIONAL: Register OAuth Scopes - Needed for things such as the ember-osf dummy app ```bash diff --git a/addons/base/views.py b/addons/base/views.py index a6c90860b98..b302115bb7f 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -21,7 +21,6 @@ from addons.base import exceptions as addon_errors from addons.base.models import BaseStorageAddon -from addons.osfstorage.models import OsfStorageFile from addons.osfstorage.models import OsfStorageFileNode from addons.osfstorage.utils import enqueue_update_analytics @@ -34,7 +33,6 @@ from framework.exceptions import HTTPError from framework.flask import redirect from framework.sentry import log_exception -from framework.routing import proxy_url from framework.transactions.handlers import no_auto_transaction from website import mails from website import settings @@ -483,7 +481,7 @@ def _construct_payload(auth, resource, credentials, waterbutler_settings): @must_be_signed @no_auto_transaction -@must_be_valid_project(quickfiles_valid=True, preprints_valid=True) +@must_be_valid_project(preprints_valid=True) def create_waterbutler_log(payload, **kwargs): with transaction.atomic(): try: @@ -603,7 +601,7 @@ def create_waterbutler_log(payload, **kwargs): metadata = payload.get('metadata') or payload.get('destination') target_node = AbstractNode.load(metadata.get('nid')) - if target_node and not target_node.is_quickfiles and payload['action'] != 'download_file': + if target_node and payload['action'] != 'download_file': update_storage_usage_with_size(payload) with transaction.atomic(): @@ -1032,16 +1030,6 @@ def persistent_file_download(auth, **kwargs): ) -def addon_view_or_download_quickfile(**kwargs): - fid = kwargs.get('fid', 'NOT_AN_FID') - file_ = OsfStorageFile.load(fid) - if not file_: - raise 
HTTPError(http_status.HTTP_404_NOT_FOUND, data={ - 'message_short': 'File Not Found', - 'message_long': 'The requested file could not be found.' - }) - return proxy_url(f'/project/{file_.target._id}/files/osfstorage/{fid}/') - def addon_view_file(auth, node, file_node, version): # TODO: resolve circular import issue from addons.wiki import settings as wiki_settings diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py index a4842d6c417..b2dcd6d86bc 100644 --- a/addons/boa/tests/test_tasks.py +++ b/addons/boa/tests/test_tasks.py @@ -38,6 +38,9 @@ def setUp(self): self.output_file_name = 'fake_boa_script_results.txt' self.job_id = '1a2b3c4d5e6f7g8' + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + def tearDown(self): super().tearDown() @@ -52,9 +55,10 @@ def test_boa_error_code(self): assert BoaErrorCode.FILE_TOO_LARGE_ERROR == 6 assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7 + @mock.patch('website.mails.settings.USE_EMAIL', True) + @mock.patch('website.mails.settings.USE_CELERY', False) def test_handle_boa_error(self): - with mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \ - mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ + with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: return_value = handle_boa_error( self.error_message, @@ -68,24 +72,7 @@ def test_handle_boa_error(self): output_file_name=self.output_file_name, job_id=self.job_id ) - mock_send_mail.assert_called_with( - to_addr=self.user_username, - mail=ADDONS_BOA_JOB_FAILURE, - fullname=self.user_fullname, - code=BoaErrorCode.UNKNOWN, - message=self.error_message, - query_file_name=self.query_file_name, - file_size=self.file_size, - max_file_size=boa_settings.MAX_SUBMISSION_SIZE, - query_file_full_path=self.file_full_path, - output_file_name=self.output_file_name, - job_id=self.job_id, - max_job_wait_hours=self.max_job_wait_hours, - project_url=self.project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, - ) + self.mock_send_grid.assert_called() mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True) mock_logger_error.assert_called_with(self.error_message) assert return_value == BoaErrorCode.UNKNOWN @@ -167,9 +154,14 @@ def setUp(self): boa_settings.REFRESH_JOB_INTERVAL = DEFAULT_REFRESH_JOB_INTERVAL boa_settings.MAX_JOB_WAITING_TIME = DEFAULT_MAX_JOB_WAITING_TIME + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + def tearDown(self): super().tearDown() + @mock.patch('website.mails.settings.USE_EMAIL', True) + @mock.patch('website.mails.settings.USE_CELERY', False) async def test_submit_success(self): with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \ mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \ @@ -179,7 +171,6 @@ async def test_submit_success(self): mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \ - mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \ 
mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: return_value = await submit_to_boa_async( self.host, @@ -199,19 +190,7 @@ async def test_submit_success(self): assert self.mock_job.refresh.call_count == 4 assert mock_async_sleep.call_count == 4 mock_close.assert_called() - mock_send_mail.assert_called_with( - to_addr=self.user.username, - mail=ADDONS_BOA_JOB_COMPLETE, - fullname=self.user.fullname, - query_file_name=self.query_file_name, - query_file_full_path=self.file_full_path, - output_file_name=self.output_file_name, - job_id=self.mock_job.id, - project_url=self.project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, - ) + self.mock_send_grid.assert_called() mock_handle_boa_error.assert_not_called() async def test_download_error(self): diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index 19940043548..d6c1fffff33 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -24,7 +24,7 @@ from framework.auth import cas from osf import features -from osf.models import Tag, QuickFilesNode +from osf.models import Tag from osf.models import files as models from addons.osfstorage.apps import osf_storage_root from addons.osfstorage import utils diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index a448f3c6edd..e387f34a768 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -314,9 +314,6 @@ def osfstorage_create_child(file_node, payload, **kwargs): if not (name or user) or '/' in name: raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - if getattr(file_node.target, 'is_quickfiles', False) and is_folder: - raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={'message_long': 'You may not create a folder for QuickFiles'}) - try: # Create a save point so that we can rollback and unlock # the parent record diff --git a/admin/base/settings/defaults.py b/admin/base/settings/defaults.py index 579b920949e..52f965ece6b 100644 --- a/admin/base/settings/defaults.py +++ b/admin/base/settings/defaults.py @@ -40,7 +40,7 @@ CSRF_COOKIE_HTTPONLY = False ALLOWED_HOSTS = [ - '.osf.io' + '.osf.io', ] AUTH_PASSWORD_VALIDATORS = [ diff --git a/admin/base/urls.py b/admin/base/urls.py index 332ddcff88f..d19d2dc638b 100644 --- a/admin/base/urls.py +++ b/admin/base/urls.py @@ -30,7 +30,6 @@ re_path(r'^maintenance/', include('admin.maintenance.urls', namespace='maintenance')), re_path(r'^meetings/', include('admin.meetings.urls', namespace='meetings')), re_path(r'^metrics/', include('admin.metrics.urls', namespace='metrics')), - re_path(r'^osf_groups/', include('admin.osf_groups.urls', namespace='osf_groups')), re_path(r'^management/', include('admin.management.urls', namespace='management')), re_path(r'^internet_archive/', include('admin.internet_archive.urls', namespace='internet_archive')), re_path(r'^schema_responses/', include('admin.schema_responses.urls', namespace='schema_responses')), diff --git a/admin/common_auth/forms.py b/admin/common_auth/forms.py index a13905bd572..aed87e67a6d 100644 --- a/admin/common_auth/forms.py +++ b/admin/common_auth/forms.py @@ -22,7 +22,7 @@ class UserRegistrationForm(forms.Form): # TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups group_perms = forms.ModelMultipleChoiceField( - queryset=Group.objects.exclude(Q(name__startswith='collections_') 
| Q(name__startswith='reviews_') | Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='osfgroup_') | Q(name__startswith='draft_registration_')), + queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='reviews_') | Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='draft_registration_')), required=False, widget=forms.CheckboxSelectMultiple ) diff --git a/admin/nodes/templatetags/node_extras.py b/admin/nodes/templatetags/node_extras.py index 801c7004f39..4fb9606f22e 100644 --- a/admin/nodes/templatetags/node_extras.py +++ b/admin/nodes/templatetags/node_extras.py @@ -39,11 +39,6 @@ def reverse_user(user): return reverse('users:user', kwargs={'guid': user._id}) -@register.filter -def reverse_osf_group(value): - return reverse('osf_groups:osf_group', kwargs={'id': value._id}) - - @register.filter def reverse_registration_provider(value): return reverse('registration_providers:detail', kwargs={'registration_provider_id': value.provider.id}) diff --git a/admin/notifications/views.py b/admin/notifications/views.py index 7a3a13a8df8..3546878e9af 100644 --- a/admin/notifications/views.py +++ b/admin/notifications/views.py @@ -1,17 +1,17 @@ -from osf.models.notifications import NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy from django.db.models import Count def delete_selected_notifications(selected_ids): - NotificationSubscription.objects.filter(id__in=selected_ids).delete() + NotificationSubscriptionLegacy.objects.filter(id__in=selected_ids).delete() def detect_duplicate_notifications(node_id=None): - query = NotificationSubscription.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) + query = NotificationSubscriptionLegacy.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) if node_id: query = query.filter(node_id=node_id) detailed_duplicates = [] for dup in query: - notifications = NotificationSubscription.objects.filter( + notifications = NotificationSubscriptionLegacy.objects.filter( _id=dup['_id'] ).order_by('created') diff --git a/admin/osf_groups/__init__.py b/admin/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin/osf_groups/forms.py b/admin/osf_groups/forms.py deleted file mode 100644 index 3e52ec2de9e..00000000000 --- a/admin/osf_groups/forms.py +++ /dev/null @@ -1,6 +0,0 @@ -from django import forms - - -class OSFGroupSearchForm(forms.Form): - name = forms.CharField(label='name', required=False) - id = forms.CharField(label='id', required=False) diff --git a/admin/osf_groups/urls.py b/admin/osf_groups/urls.py deleted file mode 100644 index 15250df2012..00000000000 --- a/admin/osf_groups/urls.py +++ /dev/null @@ -1,10 +0,0 @@ -from django.urls import re_path -from admin.osf_groups import views - -app_name = 'admin' - -urlpatterns = [ - re_path(r'^$', views.OSFGroupsListView.as_view(), name='osf_groups_list'), - re_path(r'^search/$', views.OSFGroupsFormView.as_view(), name='search'), - re_path(r'^(?P[a-z0-9]+)/$', views.OSFGroupsView.as_view(), name='osf_group'), -] diff --git a/admin/osf_groups/views.py b/admin/osf_groups/views.py deleted file mode 100644 index 8e3a9345709..00000000000 --- a/admin/osf_groups/views.py +++ /dev/null @@ -1,77 +0,0 @@ -from django.contrib.auth.mixins import PermissionRequiredMixin -from django.urls import reverse -from django.views.generic import FormView, ListView - -from osf.models import OSFGroup -from admin.osf_groups.forms 
import OSFGroupSearchForm -from admin.base.views import GuidView - - -class OSFGroupsView(PermissionRequiredMixin, GuidView): - """ Allow authorized admin user to view an osf group - """ - template_name = 'osf_groups/osf_groups.html' - context_object_name = 'group' - permission_required = 'osf.view_group' - raise_exception = True - - def get_object(self, queryset=None): - id = self.kwargs.get('id') - osf_group = OSFGroup.objects.get(_id=id) - return osf_group - - -class OSFGroupsFormView(PermissionRequiredMixin, FormView): - template_name = 'osf_groups/search.html' - object_type = 'osf_group' - permission_required = 'osf.view_group' - raise_exception = True - form_class = OSFGroupSearchForm - - def __init__(self): - self.redirect_url = None - super().__init__() - - def form_valid(self, form): - id = form.data.get('id').strip() - name = form.data.get('name').strip() - self.redirect_url = reverse('osf_groups:search') - - if id: - self.redirect_url = reverse('osf_groups:osf_group', kwargs={'id': id}) - elif name: - self.redirect_url = reverse('osf_groups:osf_groups_list',) + f'?name={name}' - - return super().form_valid(form) - - @property - def success_url(self): - return self.redirect_url - - -class OSFGroupsListView(PermissionRequiredMixin, ListView): - """ Allow authorized admin user to view list of osf groups - """ - template_name = 'osf_groups/osf_groups_list.html' - paginate_by = 10 - paginate_orphans = 1 - permission_required = 'osf.view_group' - raise_exception = True - - def get_queryset(self): - name = self.request.GET.get('name') - if name: - return OSFGroup.objects.filter(name__icontains=name) - - return OSFGroup.objects.all() - - def get_context_data(self, **kwargs): - query_set = kwargs.pop('object_list', self.object_list) - page_size = self.get_paginate_by(query_set) - paginator, page, query_set, is_paginated = self.paginate_queryset( - query_set, page_size) - - return { - 'groups': query_set, - 'page': page, - } diff --git a/admin/templates/base.html b/admin/templates/base.html index 2fdb5e7cb12..e6f10794c29 100644 --- a/admin/templates/base.html +++ b/admin/templates/base.html @@ -288,9 +288,6 @@ {% endif %} {% endif %} - {% if perms.osf.view_conference %} -
            Meetings
-        {% endif %}
        {% if perms.osf.view_metrics %}
            Metrics
        {% endif %}
@@ -300,17 +297,6 @@
        {% if perms.osf.view_management%}
            Management Commands
        {% endif %}
-        {% if perms.osf.view_osf_groups %}
-            OSF Groups
-        {% endif %}
        {% if perms.osf.view_scheduledbanner %}
  • Banners diff --git a/admin/templates/osf_groups/osf_groups.html b/admin/templates/osf_groups/osf_groups.html deleted file mode 100644 index fefcfd759d9..00000000000 --- a/admin/templates/osf_groups/osf_groups.html +++ /dev/null @@ -1,98 +0,0 @@ -{% extends 'base.html' %} -{% load static %} -{% block title %} -{% load node_extras %} -OSF Group -{% endblock title %} -{% block content %} -
-    OSF Group Detail
-    [group detail table: Field / Value]
-    _id: {{ group.id }}
-    Name: {{ group.name }}
-    Date Created: {{ group.created }}
-    Date Modified: {{ group.modified }}
-    Creator: {{ group.creator.name }}
-    Managers
-    {% if group.members %}
-    Members
-    {% endif %}
-    {% if group.nodes %}
-    Nodes
-    {% endif %}
    -{% endblock content %} diff --git a/admin/templates/osf_groups/osf_groups_list.html b/admin/templates/osf_groups/osf_groups_list.html deleted file mode 100644 index 48bf3b0eb10..00000000000 --- a/admin/templates/osf_groups/osf_groups_list.html +++ /dev/null @@ -1,40 +0,0 @@ -{% extends "base.html" %} -{% load node_extras %} - -{% load static %} -{% block title %} - OSF Groups -{% endblock title %} -{% block content %} -

-    List of Groups
-    {% include "util/pagination.html" with items=page status=status %}
-    [groups table: Name / Date Created / Date Modified / Creator]
-    {% for group in groups %}
-    {{ group.name }} / {{ group.created }} / {{ group.modified }} / {{ group.creator.name }}
-    {% endfor %}
    - -{% endblock content %} diff --git a/admin/templates/osf_groups/search.html b/admin/templates/osf_groups/search.html deleted file mode 100644 index e11d19f20c7..00000000000 --- a/admin/templates/osf_groups/search.html +++ /dev/null @@ -1,33 +0,0 @@ -{% extends 'base.html' %} -{% load static %} -{% block title %} -OSF Groups Search -{% endblock title %} -{% block content %} -
-    {% for message in messages %}
-    {{ message }}
-    {% endfor %}
-    [search form]
-    {% csrf_token %}
-    {% if form.errors %}
-    {{ form.errors }}
-    {% endif %}
-    {{ form.id }}
-    {{ form.name }}
    -{% endblock content %} diff --git a/admin_tests/meetings/__init__.py b/admin_tests/meetings/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin_tests/meetings/test_forms.py b/admin_tests/meetings/test_forms.py deleted file mode 100644 index 0417ffeeb8c..00000000000 --- a/admin_tests/meetings/test_forms.py +++ /dev/null @@ -1,80 +0,0 @@ -from tests.base import AdminTestCase -from osf_tests.factories import AuthUserFactory -from tests.test_conferences import ConferenceFactory - -from admin.meetings.forms import MeetingForm, MultiEmailField - -data = dict( - edit='False', - endpoint='short', - name='Much longer', - info_url='http://something.com', - logo_url='http://osf.io/eg634', - active='True', - admins='zzz@email.org', - public_projects='True', - poster='True', - talk='True', - submission1='poster', - submission2='talk', - submission1_plural='posters', - submission2_plural='talks', - meeting_title_type='Of course', - add_submission='No more', - mail_subject='Awesome', - mail_message_body='Nothings', - mail_attachment='Again', - homepage_link_text='Need to add to tests', -) - - -class TestMultiEmailField(AdminTestCase): - def test_to_python_nothing(self): - field = MultiEmailField() - res = field.to_python('') - assert res == [] - - def test_to_python_one(self): - field = MultiEmailField() - res = field.to_python('aaa@email.org') - assert res == ['aaa@email.org'] - - def test_to_python_more(self): - field = MultiEmailField() - res = field.to_python('aaa@email.org, bbb@email.org, ccc@email.org') - assert res == ['aaa@email.org', 'bbb@email.org', 'ccc@email.org'] - - -class TestMeetingForm(AdminTestCase): - def setUp(self): - super().setUp() - self.user = AuthUserFactory() - - def test_clean_admins_raise(self): - form = MeetingForm(data=data) - assert not form.is_valid() - assert 'admins' in form.errors - assert 'zzz@email.org' in form.errors['admins'][0] - assert 'does not have an OSF account' in form.errors['admins'][0] - - def test_clean_admins_okay(self): - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first()}) - form = MeetingForm(data=mod_data) - assert form.is_valid() - - def test_clean_endpoint_raise_not_exist(self): - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first(), 'edit': 'True'}) - form = MeetingForm(data=mod_data) - assert 'endpoint' in form.errors - assert 'Meeting not found with this endpoint to update' == form.errors['endpoint'][0] - - def test_clean_endpoint_raise_exists(self): - conf = ConferenceFactory() - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first(), - 'endpoint': conf.endpoint}) - form = MeetingForm(data=mod_data) - assert 'endpoint' in form.errors - assert 'A meeting with this endpoint exists already.' 
== form.errors['endpoint'][0] diff --git a/admin_tests/meetings/test_serializers.py b/admin_tests/meetings/test_serializers.py deleted file mode 100644 index 93fd7f6ac7c..00000000000 --- a/admin_tests/meetings/test_serializers.py +++ /dev/null @@ -1,23 +0,0 @@ -from tests.base import AdminTestCase -from tests.test_conferences import ConferenceFactory - -from admin.meetings.serializers import serialize_meeting - - -class TestsSerializeMeeting(AdminTestCase): - def setUp(self): - super().setUp() - self.conf = ConferenceFactory() - - def test_serialize(self): - res = serialize_meeting(self.conf) - assert isinstance(res, dict) - assert res['endpoint'] == self.conf.endpoint - assert res['name'] == self.conf.name - assert res['info_url'] == self.conf.info_url - assert res['logo_url'] == self.conf.logo_url - assert res['active'] == self.conf.active - assert res['public_projects'] == self.conf.public_projects - assert res['poster'] == self.conf.poster - assert res['talk'] == self.conf.talk - assert res['num_submissions'] == self.conf.valid_submissions.count() diff --git a/admin_tests/meetings/test_views.py b/admin_tests/meetings/test_views.py deleted file mode 100644 index bca7adbc14b..00000000000 --- a/admin_tests/meetings/test_views.py +++ /dev/null @@ -1,195 +0,0 @@ -import pytest -from django.test import RequestFactory -from django.http import Http404 -from django.urls import reverse -from django.contrib.auth.models import Permission -from django.core.exceptions import PermissionDenied - -from tests.base import AdminTestCase -from osf_tests.factories import AuthUserFactory -from tests.test_conferences import ConferenceFactory -from osf.models.conference import Conference, DEFAULT_FIELD_NAMES - -from admin_tests.utilities import setup_form_view -from admin_tests.meetings.test_forms import data -from admin.meetings.views import ( - MeetingListView, - MeetingCreateFormView, - MeetingFormView, - get_custom_fields, - get_admin_users, -) -from admin.meetings.forms import MeetingForm - - -class TestMeetingListView(AdminTestCase): - def setUp(self): - super().setUp() - Conference.objects.all().delete() - ConferenceFactory() - ConferenceFactory() - ConferenceFactory() - - def test_get_queryset(self): - view = MeetingListView() - assert len(view.get_queryset()) == 3 - - def test_no_user_permissions_raises_error(self): - user = AuthUserFactory() - request = RequestFactory().get(reverse('meetings:list')) - request.user = user - - with pytest.raises(PermissionDenied): - MeetingListView.as_view()(request) - - def test_correct_view_permissions(self): - user = AuthUserFactory() - - view_permission = Permission.objects.get(codename='view_conference') - user.user_permissions.add(view_permission) - user.save() - - request = RequestFactory().get(reverse('meetings:list')) - request.user = user - - response = MeetingListView.as_view()(request) - assert response.status_code == 200 - - -class TestMeetingFormView(AdminTestCase): - def setUp(self): - super().setUp() - self.conf = ConferenceFactory() - self.user = AuthUserFactory() - self.request = RequestFactory().post('/fake_path') - self.view = MeetingFormView - mod_data = dict(data) - mod_data.update({ - 'edit': 'True', - 'endpoint': self.conf.endpoint, - 'admins': self.user.emails.first().address, - 'location': 'Timbuktu, Mali', - 'start date': 'Dec 11 2014', - 'end_date': 'Jan 12 2013' - }) - self.form = MeetingForm(data=mod_data) - self.form.is_valid() - - self.url = reverse('meetings:detail', kwargs={'endpoint': self.conf.endpoint}) - - def 
test_dispatch_raise_404(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint='meh') - with pytest.raises(Http404): - view.dispatch(self.request, endpoint='meh') - - def test_get_context(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - res = view.get_context_data() - assert isinstance(res, dict) - assert 'endpoint' in res - assert res['endpoint'] == self.conf.endpoint - - def test_get_initial(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - res = view.get_initial() - assert isinstance(res, dict) - assert 'endpoint' in res - assert 'submission2_plural' in res - - def test_form_valid(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - view.form_valid(self.form) - self.conf.reload() - assert self.conf.admins.all()[0].emails.first().address == self.user.emails.first().address - assert self.conf.location == self.form.cleaned_data['location'] - assert self.conf.start_date == self.form.cleaned_data['start_date'] - - def test_no_user_permissions_raises_error(self): - request = RequestFactory().get(self.url) - request.user = self.user - - with pytest.raises(PermissionDenied): - self.view.as_view()(request, endpoint=self.conf.endpoint) - - def test_correct_view_permissions(self): - - view_permission = Permission.objects.get(codename='change_conference') - self.user.user_permissions.add(view_permission) - self.user.save() - - request = RequestFactory().get(self.url) - request.user = self.user - - response = self.view.as_view()(request, endpoint=self.conf.endpoint) - assert response.status_code == 200 - - -class TestMeetingCreateFormView(AdminTestCase): - def setUp(self): - super().setUp() - Conference.objects.all().delete() - self.user = AuthUserFactory() - self.request = RequestFactory().post('/fake_path') - self.view = MeetingCreateFormView - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.first().address}) - self.form = MeetingForm(data=mod_data) - self.form.is_valid() - - self.url = reverse('meetings:create') - - def test_get_initial(self): - self.view().get_initial() - assert not self.view().initial['edit'] - assert self.view.initial['submission1'] == DEFAULT_FIELD_NAMES['submission1'] - - def test_form_valid(self): - view = setup_form_view(self.view(), self.request, self.form) - view.form_valid(self.form) - assert Conference.objects.filter(endpoint=data['endpoint']).count() == 1 - - def test_no_user_permissions_raises_error(self): - request = RequestFactory().get(self.url) - request.user = self.user - - with pytest.raises(PermissionDenied): - self.view.as_view()(request) - - def test_correct_view_permissions(self): - change_permission = Permission.objects.get(codename='view_conference') - view_permission = Permission.objects.get(codename='change_conference') - self.user.user_permissions.add(view_permission) - self.user.user_permissions.add(change_permission) - self.user.save() - - request = RequestFactory().get(self.url) - request.user = self.user - - response = self.view.as_view()(request) - assert response.status_code == 200 - - -class TestMeetingMisc(AdminTestCase): - def test_get_custom_fields(self): - res1, res2 = get_custom_fields(data) - assert isinstance(res1, dict) - assert isinstance(res2, dict) - for key in res1.keys(): - assert 'field' not in key - - def test_get_admin_users(self): - user_1 = AuthUserFactory() 
- user_2 = AuthUserFactory() - user_3 = AuthUserFactory() - emails = [user_1.emails.first().address, user_2.emails.first().address, user_3.emails.first().address] - res = get_admin_users(emails) - assert user_1 in res - assert user_2 in res - assert user_3 in res diff --git a/admin_tests/notifications/test_views.py b/admin_tests/notifications/test_views.py index 08ad695edd1..42d182a77e5 100644 --- a/admin_tests/notifications/test_views.py +++ b/admin_tests/notifications/test_views.py @@ -1,10 +1,11 @@ import pytest from django.test import RequestFactory -from osf.models import OSFUser, NotificationSubscription, Node +from osf.models import OSFUser, Node from admin.notifications.views import ( delete_selected_notifications, detect_duplicate_notifications, ) +from osf.models.notifications import NotificationSubscriptionLegacy from tests.base import AdminTestCase pytestmark = pytest.mark.django_db @@ -18,19 +19,19 @@ def setUp(self): self.request_factory = RequestFactory() def test_delete_selected_notifications(self): - notification1 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') - notification2 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event2') - notification3 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event3') + notification1 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') + notification2 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event2') + notification3 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event3') delete_selected_notifications([notification1.id, notification2.id]) - assert not NotificationSubscription.objects.filter(id__in=[notification1.id, notification2.id]).exists() - assert NotificationSubscription.objects.filter(id=notification3.id).exists() + assert not NotificationSubscriptionLegacy.objects.filter(id__in=[notification1.id, notification2.id]).exists() + assert NotificationSubscriptionLegacy.objects.filter(id=notification3.id).exists() def test_detect_duplicate_notifications(self): - NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') - NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') - NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event2') + NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') + NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') + NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event2') duplicates = detect_duplicate_notifications() diff --git a/admin_tests/osf_groups/__init__.py b/admin_tests/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin_tests/osf_groups/test_views.py b/admin_tests/osf_groups/test_views.py deleted file mode 100644 index 12063a93c05..00000000000 --- a/admin_tests/osf_groups/test_views.py +++ /dev/null @@ -1,67 +0,0 @@ -from admin.osf_groups.views import ( - OSFGroupsListView, - OSFGroupsFormView -) -from admin_tests.utilities import setup_log_view -from django.test import RequestFactory - -from tests.base import AdminTestCase -from osf_tests.factories import UserFactory, OSFGroupFactory - - -class TestOSFGroupsListView(AdminTestCase): - - def setUp(self): - 
super().setUp() - self.user = UserFactory() - self.group = OSFGroupFactory(name='Brian Dawkins', creator=self.user) - self.group2 = OSFGroupFactory(name='Brian Westbrook', creator=self.user) - self.group3 = OSFGroupFactory(name='Darren Sproles', creator=self.user) - self.request = RequestFactory().post('/fake_path') - self.view = OSFGroupsListView() - - def test_get_default_queryset(self): - view = setup_log_view(self.view, self.request) - - queryset = view.get_queryset() - - assert len(queryset) == 3 - - assert self.group in queryset - assert self.group2 in queryset - assert self.group3 in queryset - - def test_get_queryset_by_name(self): - request = RequestFactory().post('/fake_path/?name=Brian') - view = setup_log_view(self.view, request) - - queryset = view.get_queryset() - - assert len(queryset) == 2 - - assert self.group in queryset - assert self.group2 in queryset - - -class TestOSFGroupsFormView(AdminTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.group = OSFGroupFactory(name='Brian Dawkins', creator=self.user) - self.group2 = OSFGroupFactory(name='Brian Westbrook', creator=self.user) - self.view = OSFGroupsFormView() - - def test_post_id(self): - request = RequestFactory().post('/fake_path', data={'id': self.group._id, 'name': ''}) - view = setup_log_view(self.view, request) - - redirect = view.post(request) - assert redirect.url == f'/osf_groups/{self.group._id}/' - - def test_post_name(self): - request = RequestFactory().post('/fake_path', data={'id': '', 'name': 'Brian'}) - view = setup_log_view(self.view, request) - - redirect = view.post(request) - assert redirect.url == '/osf_groups/?name=Brian' diff --git a/api/base/urls.py b/api/base/urls.py index f7e4cb74e71..142e2df34c2 100644 --- a/api/base/urls.py +++ b/api/base/urls.py @@ -53,7 +53,6 @@ re_path(r'^draft_nodes/', include('api.draft_nodes.urls', namespace='draft_nodes')), re_path(r'^draft_registrations/', include('api.draft_registrations.urls', namespace='draft_registrations')), re_path(r'^files/', include('api.files.urls', namespace='files')), - re_path(r'^groups/', include('api.osf_groups.urls', namespace='groups')), re_path(r'^guids/', include('api.guids.urls', namespace='guids')), re_path(r'^identifiers/', include('api.identifiers.urls', namespace='identifiers')), re_path(r'^institutions/', include('api.institutions.urls', namespace='institutions')), diff --git a/api/caching/tasks.py b/api/caching/tasks.py index e6d9492714b..a2a7753f51c 100644 --- a/api/caching/tasks.py +++ b/api/caching/tasks.py @@ -181,7 +181,7 @@ def update_storage_usage(target): # for fetching files we use AbstractNode instances, this is why we use branched_from property if isinstance(target, DraftRegistration): enqueue_postcommit_task(update_storage_usage_cache, (target.branched_from.id, target.branched_from._id), {}, celery=True) - elif not isinstance(target, Preprint) and not target.is_quickfiles: + elif not isinstance(target, Preprint): enqueue_postcommit_task(update_storage_usage_cache, (target.id, target._id), {}, celery=True) def update_storage_usage_with_size(payload): @@ -194,9 +194,6 @@ def update_storage_usage_with_size(payload): return target_node = AbstractNode.load(metadata['nid']) - if target_node.is_quickfiles: - return - action = payload['action'] provider = metadata.get('provider', 'osfstorage') @@ -225,7 +222,7 @@ def update_storage_usage_with_size(payload): source_provider = payload['source']['provider'] if target_node == source_node and source_provider == provider: return # Its not going 
anywhere. - if source_provider == 'osfstorage' and not source_node.is_quickfiles: + if source_provider == 'osfstorage': if source_node.storage_limit_status is settings.StorageLimits.NOT_CALCULATED: return update_storage_usage(source_node) diff --git a/api/files/serializers.py b/api/files/serializers.py index e68845c4cd1..1fa8f2e2264 100644 --- a/api/files/serializers.py +++ b/api/files/serializers.py @@ -448,18 +448,6 @@ def to_representation(self, value): return data -class QuickFilesSerializer(BaseFileSerializer): - user = RelationshipField( - related_view='users:user-detail', - related_view_kwargs={'user_id': ''}, - help_text='The user who uploaded this file', - ) - - -class QuickFilesDetailSerializer(QuickFilesSerializer): - id = IDField(source='_id', required=True) - - class FileVersionSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'id', diff --git a/api/files/views.py b/api/files/views.py index 5a498fa7089..bd2eb9979cd 100644 --- a/api/files/views.py +++ b/api/files/views.py @@ -24,11 +24,12 @@ from api.cedar_metadata_records.utils import can_view_record from api.nodes.permissions import ContributorOrPublic from api.files import annotations -from api.files.permissions import IsPreprintFile -from api.files.permissions import CheckedOutOrAdmin -from api.files.serializers import FileSerializer -from api.files.serializers import FileDetailSerializer -from api.files.serializers import FileVersionSerializer +from api.files.permissions import IsPreprintFile, CheckedOutOrAdmin +from api.files.serializers import ( + FileSerializer, + FileDetailSerializer, + FileVersionSerializer, +) from osf.utils.permissions import ADMIN @@ -53,10 +54,6 @@ def get_file(self, check_permissions=True): if getattr(obj.target, 'deleted', None): raise Gone(detail='The requested file is no longer available') - if getattr(obj.target, 'is_quickfiles', False) and getattr(obj.target, 'creator'): - if obj.target.creator.is_disabled: - raise Gone(detail='This user has been deactivated and their quickfiles are no longer available.') - if getattr(obj.target, 'is_retracted', False): raise Gone(detail='The requested file is no longer available.') @@ -85,9 +82,6 @@ class FileDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView, FileMixin): view_category = 'files' view_name = 'file-detail' - def get_serializer_class(self): - return FileDetailSerializer - def get_target(self): return self.get_file().target @@ -97,8 +91,7 @@ def get_object(self): file = self.get_file() if self.request.GET.get('create_guid', False): - # allows quickfiles to be given guids when another user wants a permanent link to it - if (self.get_target().has_permission(user, ADMIN) and utils.has_admin_scope(self.request)) or getattr(file.target, 'is_quickfiles', False): + if (self.get_target().has_permission(user, ADMIN) and utils.has_admin_scope(self.request)): file.get_guid(create=True) # We normally would pass this through `get_file` as an annotation, but the `select_for_update` feature prevents diff --git a/api/logs/serializers.py b/api/logs/serializers.py index 85e7a8058c6..5d5df3fc882 100644 --- a/api/logs/serializers.py +++ b/api/logs/serializers.py @@ -257,11 +257,6 @@ class Meta: related_view_kwargs={'node_id': ''}, ) - group = RelationshipField( - related_view='groups:group-detail', - related_view_kwargs={'group_id': ''}, - ) - def get_absolute_url(self, obj): return obj.absolute_url diff --git a/api/nodes/permissions.py b/api/nodes/permissions.py index cf42b5a501e..5fc16f6cf16 100644 --- a/api/nodes/permissions.py +++ 
b/api/nodes/permissions.py @@ -10,7 +10,6 @@ Institution, Node, NodeRelation, - OSFGroup, OSFUser, Preprint, PrivateLink, @@ -218,7 +217,7 @@ class NodeGroupDetailPermissions(permissions.BasePermission): """Permissions for node group detail - involving who can update the relationship between a node and an OSF Group.""" - acceptable_models = (OSFGroup, AbstractNode) + acceptable_models = (AbstractNode,) def load_resource(self, context, view): return AbstractNode.load(context[view.node_lookup_url_kwarg]) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 341c589d8aa..e4e5e01e983 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -37,7 +37,7 @@ from osf.models import ( Comment, DraftRegistration, ExternalAccount, RegistrationSchema, AbstractNode, PrivateLink, Preprint, - RegistrationProvider, OSFGroup, NodeLicense, DraftNode, + RegistrationProvider, NodeLicense, DraftNode, Registration, Node, ) from website.project import new_private_link @@ -392,11 +392,6 @@ class NodeSerializer(TaxonomizableSerializerMixin, JSONAPISerializer): related_meta={'count': 'get_forks_count'}, ) - groups = RelationshipField( - related_view='nodes:node-groups', - related_view_kwargs={'node_id': '<_id>'}, - ) - node_links = ShowIfVersion( RelationshipField( related_view='nodes:node-pointers', @@ -816,9 +811,6 @@ def create(self, validated_data): except ValidationError as e: raise InvalidModelValueError(detail=list(e)[0]) node.add_contributors(contributors, auth=auth, log=True, save=True) - for group in parent.osf_groups: - if group.is_manager(user): - node.add_osf_group(group, group.get_permission_to_node(parent), auth=auth) if is_truthy(request.GET.get('inherit_subjects')) and validated_data['parent'].has_permission(user, osf_permissions.WRITE): parent = validated_data['parent'] node.subjects.add(parent.subjects.all()) @@ -1903,120 +1895,3 @@ def enable_or_disable_addon(self, obj, should_enable, addon_name, auth): if isinstance(addon, bool): addon = None return addon - - -class NodeGroupsSerializer(JSONAPISerializer): - filterable_fields = frozenset([ - 'name', - 'permission', - 'date_created', - ]) - - writeable_method_fields = frozenset([ - 'permission', - ]) - - non_anonymized_fields = [ - 'type', - 'permission', - ] - - id = CompoundIDField(source='_id', read_only=True) - type = TypeField() - permission = ser.SerializerMethodField() - name = ser.CharField(read_only=True) - date_created = VersionedDateTimeField(source='created', read_only=True) - date_modified = VersionedDateTimeField(source='modified', read_only=True) - - groups = RelationshipField( - related_view='groups:group-detail', - related_view_kwargs={'group_id': '<_id>'}, - required=False, - ) - - links = LinksField({ - 'self': 'get_absolute_url', - }) - - def get_absolute_url(self, obj): - node = self.context['node'] - return absolute_reverse( - 'nodes:node-group-detail', kwargs={ - 'group_id': obj._id, - 'node_id': node._id, - 'version': self.context['request'].parser_context['kwargs']['version'], - }, - ) - - def get_permission(self, obj): - node = self.context['node'] - return obj.get_permission_to_node(node) - - class Meta: - type_ = 'node-groups' - - -class NodeGroupsCreateSerializer(NodeGroupsSerializer): - """ - Overrides NodeGroupSerializer so groups relationship is properly parsed - (JSONAPIParser will flatten groups relationship into {'_id': 'group_id'}, - so _id field needs to be writeable so it's not dropped from validated_data) - - """ - id = IDField(source='_id', required=False, allow_null=True) - 
- groups = RelationshipField( - related_view='groups:group-detail', - related_view_kwargs={'group_id': '<_id>'}, - required=False, - ) - - def load_osf_group(self, _id): - if not _id: - raise exceptions.ValidationError(detail='Group relationship must be specified.') - try: - osf_group = OSFGroup.objects.get(_id=_id) - except OSFGroup.DoesNotExist: - raise exceptions.NotFound(detail=f'Group {_id} is invalid.') - return osf_group - - def create(self, validated_data): - auth = get_user_auth(self.context['request']) - node = self.context['node'] - permission = validated_data.get('permission', osf_permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS) - group = self.load_osf_group(validated_data.get('_id')) - if group in node.osf_groups: - raise exceptions.ValidationError( - f'The group {group._id} has already been added to the node {node._id}', - ) - - try: - node.add_osf_group(group, permission, auth) - except PermissionsError as e: - raise exceptions.PermissionDenied(detail=str(e)) - except ValueError as e: - # permission is in writeable_method_fields, so validation happens on OSF Group model - raise exceptions.ValidationError(detail=str(e)) - return group - - -class NodeGroupsDetailSerializer(NodeGroupsSerializer): - """ - Overrides NodeGroupsSerializer to make id required. Adds update method here. - """ - id = CompoundIDField(source='_id', required=True) - - def update(self, obj, validated_data): - auth = get_user_auth(self.context['request']) - node = self.context['node'] - permission = validated_data.get('permission') - if not permission: - return obj - try: - node.update_osf_group(obj, permission, auth) - except PermissionsError as e: - raise exceptions.PermissionDenied(detail=str(e.message)) - except ValueError as e: - # permission is in writeable_method_fields, so validation happens on OSF Group model - raise exceptions.ValidationError(detail=str(e)) - return obj diff --git a/api/nodes/urls.py b/api/nodes/urls.py index 20886a72f39..4c409d1be7d 100644 --- a/api/nodes/urls.py +++ b/api/nodes/urls.py @@ -31,8 +31,6 @@ re_path(r'^(?P\w+)/files/(?P[a-zA-Z0-9\-]*)(?P/(?:.*/)?)$', views.NodeFilesList.as_view(), name=views.NodeFilesList.view_name), re_path(r'^(?P\w+)/files/(?P[a-zA-Z0-9\-]*)(?P/.+[^/])$', views.NodeFileDetail.as_view(), name=views.NodeFileDetail.view_name), re_path(r'^(?P\w+)/forks/$', views.NodeForksList.as_view(), name=views.NodeForksList.view_name), - re_path(r'^(?P\w+)/groups/$', views.NodeGroupsList.as_view(), name=views.NodeGroupsList.view_name), - re_path(r'^(?P\w+)/groups/(?P\w+)/$', views.NodeGroupsDetail.as_view(), name=views.NodeGroupsDetail.view_name), re_path(r'^(?P\w+)/identifiers/$', views.NodeIdentifierList.as_view(), name=views.NodeIdentifierList.view_name), re_path(r'^(?P\w+)/institutions/$', views.NodeInstitutionsList.as_view(), name=views.NodeInstitutionsList.view_name), re_path(r'^(?P\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name=views.LinkedNodesList.view_name), diff --git a/api/nodes/views.py b/api/nodes/views.py index 87e8a4aabc7..8e5352f6f30 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -63,7 +63,6 @@ LinkedRegistrationsRelationship, WaterButlerMixin, ) -from api.base.waffle_decorators import require_flag from api.base.permissions import WriteOrPublicForRelationshipInstitutions from api.cedar_metadata_records.serializers import CedarMetadataRecordsListSerializer from api.cedar_metadata_records.utils import can_view_record @@ -92,7 +91,6 @@ RegistrationAndPermissionCheckForPointers, ContributorDetailPermissions, ReadOnlyIfRegistration, 
- NodeGroupDetailPermissions, IsContributorOrGroupMember, AdminDeletePermissions, ExcludeWithdrawals, @@ -120,12 +118,8 @@ NodeStorageSerializer, NodeCitationSerializer, NodeCitationStyleSerializer, - NodeGroupsSerializer, - NodeGroupsCreateSerializer, - NodeGroupsDetailSerializer, ) from api.nodes.utils import NodeOptimizationMixin, enforce_no_children -from api.osf_groups.views import OSFGroupMixin from api.preprints.serializers import PreprintSerializer from api.registrations import annotations as registration_annotations from api.registrations.serializers import ( @@ -143,7 +137,6 @@ from framework.exceptions import HTTPError, PermissionsError from framework.auth.oauth_scopes import CoreScopes from framework.sentry import log_exception -from osf.features import OSF_GROUPS from osf.models import ( AbstractNode, OSFUser, @@ -154,7 +147,6 @@ DraftRegistration, Registration, BaseFileNode, - OSFGroup, NodeRelation, Guid, File, @@ -1255,111 +1247,6 @@ def get_object(self): return fobj -class NodeGroupsBase(JSONAPIBaseView, NodeMixin, OSFGroupMixin): - model_class = OSFGroup - - required_read_scopes = [CoreScopes.NODE_OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.NODE_OSF_GROUPS_WRITE] - view_category = 'nodes' - - -class NodeGroupsList(NodeGroupsBase, generics.ListCreateAPIView, ListFilterMixin): - """ The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_groups_list) - - """ - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - AdminOrPublic, - base_permissions.TokenHasScope, - ) - - serializer_class = NodeGroupsSerializer - view_name = 'node-groups' - - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - return self.get_node().osf_groups - - def get_queryset(self): - return self.get_queryset_from_request() - - # overrides FilterMixin - def build_query_from_field(self, field_name, operation): - if field_name == 'permission': - node = self.get_node() - try: - groups_with_perm_ids = node.get_osf_groups_with_perms(operation['value']).values_list('id', flat=True) - except ValueError: - raise ValidationError('{} is not a filterable permission.'.format(operation['value'])) - return Q(id__in=groups_with_perm_ids) - - return super().build_query_from_field(field_name, operation) - - # overrides ListCreateAPIView - def get_serializer_class(self): - if self.request.method == 'POST': - return NodeGroupsCreateSerializer - else: - return NodeGroupsSerializer - - # overrides ListCreateAPIView - def get_serializer_context(self): - """ - Extra context for NodeGroupsSerializer - """ - context = super().get_serializer_context() - context['node'] = self.get_node(check_object_permissions=False) - return context - - @require_flag(OSF_GROUPS) - def perform_create(self, serializer): - return super().perform_create(serializer) - - -class NodeGroupsDetail(NodeGroupsBase, generics.RetrieveUpdateDestroyAPIView): - """ The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_groups_read) - - """ - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - NodeGroupDetailPermissions, - base_permissions.TokenHasScope, - ) - - serializer_class = NodeGroupsDetailSerializer - - view_name = 'node-group-detail' - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def get_object(self): - node = self.get_node(check_object_permissions=False) - # Node permissions checked when group is loaded - group = self.get_osf_group(self.kwargs.get('group_id')) - if not 
group.get_permission_to_node(node): - raise NotFound(f'Group {group._id} does not have permissions to node {node._id}.') - return group - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def perform_destroy(self, instance): - node = self.get_node(check_object_permissions=False) - auth = get_user_auth(self.request) - try: - node.remove_osf_group(instance, auth) - except PermissionsError: - raise PermissionDenied('Not authorized to remove this group.') - - # Overrides RetrieveUpdateDestroyAPIView - def get_serializer_context(self): - """ - Extra context for NodeGroupsSerializer - """ - context = super().get_serializer_context() - context['node'] = self.get_node(check_object_permissions=False) - return context - - class NodeAddonList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, NodeMixin, AddonSettingsMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_addons_list). diff --git a/api/osf_groups/__init__.py b/api/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/api/osf_groups/permissions.py b/api/osf_groups/permissions.py deleted file mode 100644 index b9601f05d7d..00000000000 --- a/api/osf_groups/permissions.py +++ /dev/null @@ -1,39 +0,0 @@ -from rest_framework import permissions - -from api.base.utils import assert_resource_type, get_user_auth -from osf.utils.permissions import MANAGE -from osf.models import OSFGroup, OSFUser - - -class IsGroupManager(permissions.BasePermission): - - acceptable_models = (OSFGroup,) - - def has_object_permission(self, request, view, obj): - assert_resource_type(obj, self.acceptable_models) - auth = get_user_auth(request) - - if request.method in permissions.SAFE_METHODS: - return True - else: - return auth.user and obj.has_permission(auth.user, MANAGE) - - -class GroupMemberManagement(permissions.BasePermission): - - acceptable_models = (OSFGroup, OSFUser) - - def has_object_permission(self, request, view, obj): - if not isinstance(obj, OSFGroup): - obj = OSFGroup.load(request.parser_context['kwargs']['group_id']) - assert_resource_type(obj, self.acceptable_models) - auth = get_user_auth(request) - if request.method in permissions.SAFE_METHODS: - return True - elif request.method == 'DELETE': - user = OSFUser.load(request.parser_context['kwargs']['user_id']) - # You must have manage permissions on the OSFGroup to remove a member, - # unless you are removing yourself - return obj.has_permission(auth.user, MANAGE) or auth.user == user - else: - return auth.user and obj.has_permission(auth.user, MANAGE) diff --git a/api/osf_groups/serializers.py b/api/osf_groups/serializers.py deleted file mode 100644 index 353aa121da1..00000000000 --- a/api/osf_groups/serializers.py +++ /dev/null @@ -1,200 +0,0 @@ -from rest_framework import serializers as ser, exceptions -from django.core.exceptions import ValidationError - -from framework.auth.core import Auth -from api.base.exceptions import InvalidModelValueError -from api.base.serializers import ( - IDField, - LinksField, - JSONAPISerializer, - RelationshipField, - TypeField, - VersionedDateTimeField, -) -from api.base.utils import absolute_reverse -from api.nodes.serializers import CompoundIDField -from osf.models import OSFUser -from osf.models.osf_group import OSFGroup -from osf.utils.permissions import GROUP_ROLES, MEMBER, MANAGER - - -class GroupSerializer(JSONAPISerializer): - filterable_fields = frozenset([ - 'name', - ]) - - non_anonymized_fields = [ - 'type', - ] - - id = 
IDField(source='_id', read_only=True) - type = TypeField() - name = ser.CharField(required=True) - date_created = VersionedDateTimeField(source='created', read_only=True) - date_modified = VersionedDateTimeField(source='modified', read_only=True) - - links = LinksField({ - 'self': 'get_absolute_url', - }) - - def get_absolute_url(self, obj): - return obj.get_absolute_url() - - members = RelationshipField( - related_view='groups:group-members', - related_view_kwargs={'group_id': '<_id>'}, - ) - - class Meta: - type_ = 'groups' - - def create(self, validated_data): - group = OSFGroup(creator=validated_data['creator'], name=validated_data['name']) - group.save() - return group - - def update(self, instance, validated_data): - if 'name' in validated_data: - instance.set_group_name(validated_data.get('name')) - instance.save() - return instance - - -class GroupDetailSerializer(GroupSerializer): - """ - Overrides GroupSerializer to make id required. - """ - id = IDField(source='_id', required=True) - - -class GroupCompoundIDField(CompoundIDField): - def _get_resource_id(self): - return self.context['request'].parser_context['kwargs']['group_id'] - - -class GroupMemberSerializer(JSONAPISerializer): - filterable_fields = frozenset([ - 'role', - 'full_name', - ]) - writeable_method_fields = frozenset([ - 'role', - ]) - non_anonymized_fields = [ - 'type', - 'role', - ] - - id = GroupCompoundIDField(source='_id', read_only=True) - type = TypeField() - role = ser.SerializerMethodField() - unregistered_member = ser.SerializerMethodField() - full_name = ser.CharField(read_only=True, source='fullname') - - users = RelationshipField( - related_view='users:user-detail', - related_view_kwargs={'user_id': '<_id>'}, - ) - - links = LinksField({ - 'self': 'get_absolute_url', - }) - - def get_role(self, user): - return user.group_role(self.context['group']) - - def get_unregistered_member(self, obj): - unclaimed_records = obj.unclaimed_records.get(self.context['group']._id, None) - if unclaimed_records: - return unclaimed_records.get('name', None) - - def get_member_method(self, group, role): - methods = { - MANAGER: group.make_manager, - MEMBER: group.make_member, - } - return methods[role] - - def get_group_role(self, validated_data, default_role): - role = validated_data.get('role', default_role) - if role not in GROUP_ROLES: - raise exceptions.ValidationError(f'{role} is not a valid role; choose manager or member.') - return role - - class Meta: - type_ = 'group-members' - - def get_absolute_url(self, obj): - return absolute_reverse( - 'groups:group-member-detail', - kwargs={ - 'user_id': obj._id, - 'group_id': self.context['request'].parser_context['kwargs']['group_id'], - 'version': self.context['request'].parser_context['kwargs']['version'], - }, - ) - - -class GroupMemberCreateSerializer(GroupMemberSerializer): - id = GroupCompoundIDField(source='_id', required=False, allow_null=True) - type = TypeField() - full_name = ser.CharField(required=False) - email = ser.EmailField(required=False, write_only=True) - - def to_representation(self, instance, envelope='data'): - """ - Use GroupMemberSerializer for the response, but GroupMemberCreateSerializer - for the request. We only want full_name to be writable on create member (for unregistered members). - User serializer endpoints should be used to edit user's full_name. 
- """ - return GroupMemberSerializer(instance=instance, context=self.context).data - - def get_user_object(self, user_id, group): - if user_id: - user = OSFUser.load(user_id) - if not user: - raise exceptions.NotFound(detail=f'User with id {user_id} not found.') - if group.has_permission(user, 'member'): - raise exceptions.ValidationError(detail='User is already a member of this group.') - return user - return user_id - - def create(self, validated_data): - group = self.context['group'] - user = self.get_user_object(validated_data.get('_id', None), group) - auth = Auth(self.context['request'].user) - full_name = validated_data.get('full_name', None) - email = validated_data.get('email', None) - role = self.get_group_role(validated_data, MEMBER) - - try: - if user: - self.get_member_method(group, role)(user, auth) - else: - if not full_name or not email: - raise exceptions.ValidationError(detail='You must provide a full_name/email combination to add an unconfirmed member.') - else: - user = group.add_unregistered_member(full_name, email, auth, role) - except ValueError as e: - raise exceptions.ValidationError(detail=str(e)) - except ValidationError as e: - raise InvalidModelValueError(detail=list(e)[0]) - - return user - - -class GroupMemberDetailSerializer(GroupMemberSerializer): - id = GroupCompoundIDField(source='_id', required=True) - - def update(self, user, validated_data): - group = self.context['group'] - role = self.get_group_role(validated_data, user.group_role(group)) - auth = Auth(self.context['request'].user) - - try: - # Making sure the one-manager rule isn't violated - self.get_member_method(self.context['group'], role)(user, auth) - except ValueError as e: - raise exceptions.ValidationError(detail=str(e)) - - return user diff --git a/api/osf_groups/urls.py b/api/osf_groups/urls.py deleted file mode 100644 index ad80a9a5bc8..00000000000 --- a/api/osf_groups/urls.py +++ /dev/null @@ -1,12 +0,0 @@ -from django.urls import re_path - -from api.osf_groups import views - -app_name = 'osf' - -urlpatterns = [ - re_path(r'^$', views.GroupList.as_view(), name=views.GroupList.view_name), - re_path(r'^(?P\w+)/$', views.GroupDetail.as_view(), name=views.GroupDetail.view_name), - re_path(r'^(?P\w+)/members/$', views.GroupMembersList.as_view(), name=views.GroupMembersList.view_name), - re_path(r'^(?P\w+)/members/(?P\w+)/$', views.GroupMemberDetail.as_view(), name=views.GroupMemberDetail.view_name), -] diff --git a/api/osf_groups/views.py b/api/osf_groups/views.py deleted file mode 100644 index 7593803e4f4..00000000000 --- a/api/osf_groups/views.py +++ /dev/null @@ -1,243 +0,0 @@ -from django.apps import apps -from django.db.models import Q - -from rest_framework import generics, permissions as drf_permissions -from rest_framework.exceptions import NotFound, ValidationError - -from api.base import permissions as base_permissions -from api.base.exceptions import InvalidFilterOperator, InvalidFilterValue -from api.base.filters import ListFilterMixin -from api.base.utils import get_object_or_error, get_user_auth, is_bulk_request -from api.base.views import JSONAPIBaseView -from api.base import generic_bulk_views as bulk_views -from api.base.waffle_decorators import require_flag -from api.osf_groups.permissions import IsGroupManager, GroupMemberManagement -from api.osf_groups.serializers import ( - GroupSerializer, - GroupDetailSerializer, - GroupMemberSerializer, - GroupMemberDetailSerializer, - GroupMemberCreateSerializer, -) -from api.users.views import UserMixin -from 
framework.auth.oauth_scopes import CoreScopes -from osf.features import OSF_GROUPS -from osf.models import OSFGroup, OSFUser -from osf.utils.permissions import MANAGER, GROUP_ROLES - - -class OSFGroupMixin: - """ - Mixin with convenience method for retrieving the current OSF Group - """ - group_lookup_url_kwarg = 'group_id' - - def get_osf_group(self, check_object_permissions=True): - - group = get_object_or_error( - OSFGroup, - self.kwargs[self.group_lookup_url_kwarg], - self.request, - display_name='osf_group', - ) - - if check_object_permissions: - self.check_object_permissions(self.request, group) - return group - - -class GroupBaseView(JSONAPIBaseView, OSFGroupMixin): - required_read_scopes = [CoreScopes.OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.OSF_GROUPS_WRITE] - model_class = apps.get_model('osf.OSFGroup') - - view_category = 'groups' - - -class GroupList(GroupBaseView, generics.ListCreateAPIView, ListFilterMixin): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - ) - - serializer_class = GroupSerializer - view_name = 'group-list' - ordering = ('-modified',) - - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - user = self.request.user - if user.is_anonymous: - return OSFGroup.objects.none() - return user.osf_groups - - # overrides ListCreateAPIView - def get_queryset(self): - return self.get_queryset_from_request() - - # overrides ListCreateAPIView - @require_flag(OSF_GROUPS) - def perform_create(self, serializer): - """Create an OSFGroup. - - :param serializer: - """ - # On creation, logged in user is the creator - user = self.request.user - serializer.save(creator=user) - - -class GroupDetail(GroupBaseView, generics.RetrieveUpdateDestroyAPIView): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - IsGroupManager, - ) - - serializer_class = GroupDetailSerializer - view_name = 'group-detail' - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def get_object(self): - return self.get_osf_group() - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def perform_destroy(self, instance): - auth = get_user_auth(self.request) - instance.remove_group(auth=auth) - - -class OSFGroupMemberBaseView(JSONAPIBaseView, OSFGroupMixin): - """ - Base group used for OSFGroupMemberList and OSFGroupMemberDetail - """ - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - IsGroupManager, - ) - required_read_scopes = [CoreScopes.OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.OSF_GROUPS_WRITE] - - model_class = apps.get_model('osf.OSFUser') - serializer_class = GroupMemberSerializer - view_category = 'groups' - ordering = ('-modified',) - - def _assert_member_belongs_to_group(self, user): - group = self.get_osf_group() - # Checking group membership instead of permissions, so unregistered members are - # recognized as group members - if not group.is_member(user): - raise NotFound(f'{user._id} cannot be found in this OSFGroup') - - def get_serializer_class(self): - if self.request.method in ('PUT', 'PATCH', 'DELETE'): - return GroupMemberDetailSerializer - elif self.request.method == 'POST': - return GroupMemberCreateSerializer - else: - return GroupMemberSerializer - - # overrides DestroyAPIView - @require_flag(OSF_GROUPS) - def perform_destroy(self, instance): - group = self.get_osf_group() - auth = get_user_auth(self.request) - try: - group.remove_member(instance, auth) 
- except ValueError as e: - raise ValidationError(detail=str(e)) - - -class GroupMembersList(OSFGroupMemberBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.BulkDestroyJSONAPIView, bulk_views.ListBulkCreateJSONAPIView, ListFilterMixin): - view_name = 'group-members' - - # Overrides ListBulkCreateJSONAPIView - def get_queryset(self): - queryset = self.get_queryset_from_request() - if is_bulk_request(self.request): - user_ids = [] - for user in self.request.data: - try: - user_id = user['id'].split('-')[1] - except AttributeError: - raise ValidationError('Member identifier not provided.') - except IndexError: - raise ValidationError('Member identifier incorrectly formatted.') - else: - user_ids.append(user_id) - queryset = queryset.filter(guids___id__in=user_ids) - return queryset - - # Overrides ListFilterMixin - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - # Returns all members and managers of the OSF Group (User objects) - return self.get_osf_group().members - - # Overrides ListBulkCreateJSONAPIView - def get_serializer_context(self): - context = super().get_serializer_context() - # Permissions check handled here - needed when performing write operations - context['group'] = self.get_osf_group() - return context - - # Overrides BulkDestroyJSONAPIView - def get_requested_resources(self, request, request_data): - requested_ids = [] - for data in request_data: - try: - requested_ids.append(data['id'].split('-')[1]) - except IndexError: - raise ValidationError('Member identifier incorrectly formatted.') - - resource_object_list = OSFUser.objects.filter(guids___id__in=requested_ids) - for resource in resource_object_list: - self._assert_member_belongs_to_group(resource) - - if len(resource_object_list) != len(request_data): - raise ValidationError({'non_field_errors': 'Could not find all objects to delete.'}) - - return resource_object_list - - # Overrides ListFilterMixin - def build_query_from_field(self, field_name, operation): - if field_name == 'role': - if operation['op'] != 'eq': - raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq']) - # operation['value'] should be 'member' or 'manager' - role = operation['value'].lower().strip() - if role not in GROUP_ROLES: - raise InvalidFilterValue(value=operation['value']) - group = self.get_osf_group(check_object_permissions=False) - return Q(id__in=group.managers if role == MANAGER else group.members_only) - return super().build_query_from_field(field_name, operation) - - @require_flag(OSF_GROUPS) - def perform_create(self, serializer): - return super().perform_create(serializer) - - -class GroupMemberDetail(OSFGroupMemberBaseView, generics.RetrieveUpdateDestroyAPIView, UserMixin): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - GroupMemberManagement, - ) - view_name = 'group-member-detail' - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def get_object(self): - user = self.get_user() - self._assert_member_belongs_to_group(user) - return user - - # Overrides RetrieveUpdateDestroyAPIView - def get_serializer_context(self): - context = super().get_serializer_context() - context['group'] = self.get_osf_group(check_object_permissions=False) - return context diff --git a/api/registrations/serializers.py b/api/registrations/serializers.py index c15c947f45f..786d76ddccb 100644 --- a/api/registrations/serializers.py +++ b/api/registrations/serializers.py @@ -305,13 +305,6 @@ class RegistrationSerializer(NodeSerializer): ), ) - 
groups = HideIfRegistration( - RelationshipField( - related_view='nodes:node-groups', - related_view_kwargs={'node_id': '<_id>'}, - ), - ) - node_links = ShowIfVersion( HideIfWithdrawal( RelationshipField( diff --git a/api/subscriptions/fields.py b/api/subscriptions/fields.py new file mode 100644 index 00000000000..ddbcd4f4aa5 --- /dev/null +++ b/api/subscriptions/fields.py @@ -0,0 +1,11 @@ +from rest_framework import serializers as ser + +class FrequencyField(ser.ChoiceField): + def __init__(self, **kwargs): + super().__init__(choices=['none', 'instantly', 'daily', 'weekly', 'monthly'], **kwargs) + + def to_representation(self, frequency: str): + return frequency or 'none' + + def to_internal_value(self, freq): + return super().to_internal_value(freq) diff --git a/api/subscriptions/permissions.py b/api/subscriptions/permissions.py index 19dc7bcbd58..b22831f2766 100644 --- a/api/subscriptions/permissions.py +++ b/api/subscriptions/permissions.py @@ -1,13 +1,10 @@ from rest_framework import permissions -from osf.models.notifications import NotificationSubscription +from osf.models.notification_subscription import NotificationSubscription class IsSubscriptionOwner(permissions.BasePermission): def has_object_permission(self, request, view, obj): assert isinstance(obj, NotificationSubscription), f'obj must be a NotificationSubscription; got {obj}' - user_id = request.user.id - return obj.none.filter(id=user_id).exists() \ - or obj.email_transactional.filter(id=user_id).exists() \ - or obj.email_digest.filter(id=user_id).exists() + return obj.user == request.user diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index da7aadbb1a4..ede0782ae65 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -1,58 +1,55 @@ +from django.contrib.contenttypes.models import ContentType from rest_framework import serializers as ser -from rest_framework.exceptions import ValidationError from api.nodes.serializers import RegistrationProviderRelationshipField from api.collections_providers.fields import CollectionProviderRelationshipField from api.preprints.serializers import PreprintProviderRelationshipField +from osf.models import Node from website.util import api_v2_url from api.base.serializers import JSONAPISerializer, LinksField - -NOTIFICATION_TYPES = { - 'none': 'none', - 'instant': 'email_transactional', - 'daily': 'email_digest', -} - - -class FrequencyField(ser.Field): - def to_representation(self, obj): - user_id = self.context['request'].user.id - if obj.email_transactional.filter(id=user_id).exists(): - return 'instant' - if obj.email_digest.filter(id=user_id).exists(): - return 'daily' - return 'none' - - def to_internal_value(self, frequency): - notification_type = NOTIFICATION_TYPES.get(frequency) - if notification_type: - return {'notification_type': notification_type} - raise ValidationError(f'Invalid frequency "{frequency}"') +from .fields import FrequencyField class SubscriptionSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'id', 'event_name', + 'frequency', ]) - id = ser.CharField(source='_id', read_only=True) + id = ser.CharField( + read_only=True, + source='legacy_id', + help_text='The id of the subscription fixed for backward compatibility', + ) event_name = ser.CharField(read_only=True) - frequency = FrequencyField(source='*', required=True) - links = LinksField({ - 'self': 'get_absolute_url', - }) + frequency = FrequencyField(source='message_frequency', required=True) class Meta: type_ = 'subscription' + 
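The new `FrequencyField` in `api/subscriptions/fields.py` above replaces the old per-user `none`/`email_transactional`/`email_digest` lookups with a plain choice field backed by `message_frequency`. A small standalone check of its behaviour, assuming only the definition shown in the diff:

```python
# Self-contained check of the FrequencyField added above (definition repeated from the diff).
from rest_framework import serializers as ser


class FrequencyField(ser.ChoiceField):
    def __init__(self, **kwargs):
        super().__init__(choices=['none', 'instantly', 'daily', 'weekly', 'monthly'], **kwargs)

    def to_representation(self, frequency: str):
        # A null/empty stored frequency is rendered as the explicit string 'none'.
        return frequency or 'none'

    def to_internal_value(self, freq):
        return super().to_internal_value(freq)


field = FrequencyField()
assert field.to_representation(None) == 'none'        # falsy values collapse to 'none'
assert field.to_representation('daily') == 'daily'
assert field.to_internal_value('weekly') == 'weekly'  # valid choices pass through unchanged
try:
    field.to_internal_value('hourly')                  # not in choices -> ValidationError
except ser.ValidationError:
    pass
```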
links = LinksField({ + 'self': 'get_absolute_url', + }) + def get_absolute_url(self, obj): return obj.absolute_api_v2_url def update(self, instance, validated_data): user = self.context['request'].user - notification_type = validated_data.get('notification_type') - instance.add_user_to_subscription(user, notification_type, save=True) + frequency = validated_data.get('frequency') or 'none' + instance.message_frequency = frequency + + if frequency != 'none' and instance.content_type == ContentType.objects.get_for_model(Node): + node = Node.objects.get( + id=instance.id, + content_type=instance.content_type, + ) + user_subs = node.parent_node.child_node_subscriptions + if node._id not in user_subs.setdefault(user._id, []): + user_subs[user._id].append(node._id) + node.parent_node.save() + return instance diff --git a/api/subscriptions/views.py b/api/subscriptions/views.py index c1d7e833b49..57a4dbf36c7 100644 --- a/api/subscriptions/views.py +++ b/api/subscriptions/views.py @@ -1,8 +1,11 @@ +from django.db.models import Value, When, Case, F, Q, OuterRef, Subquery +from django.db.models.fields import CharField, IntegerField +from django.db.models.functions import Concat, Cast +from django.contrib.contenttypes.models import ContentType from rest_framework import generics from rest_framework import permissions as drf_permissions from rest_framework.exceptions import NotFound -from django.core.exceptions import ObjectDoesNotExist -from django.db.models import Q +from django.core.exceptions import ObjectDoesNotExist, PermissionDenied from framework.auth.oauth_scopes import CoreScopes from api.base.views import JSONAPIBaseView @@ -16,12 +19,13 @@ ) from api.subscriptions.permissions import IsSubscriptionOwner from osf.models import ( - NotificationSubscription, CollectionProvider, PreprintProvider, RegistrationProvider, - AbstractProvider, + AbstractProvider, AbstractNode, Preprint, OSFUser, ) +from osf.models.notification_type import NotificationType +from osf.models.notification_subscription import NotificationSubscription class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): @@ -37,32 +41,59 @@ class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): required_read_scopes = [CoreScopes.SUBSCRIPTIONS_READ] required_write_scopes = [CoreScopes.NULL] - def get_default_queryset(self): - user = self.request.user - return NotificationSubscription.objects.filter( - Q(none=user) | - Q(email_digest=user) | - Q( - email_transactional=user, - ), - ).distinct() - def get_queryset(self): - return self.get_queryset_from_request() + user_guid = self.request.user._id + provider_ct = ContentType.objects.get(app_label='osf', model='abstractprovider') + + provider_subquery = AbstractProvider.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('_id')[:1] + + node_subquery = AbstractNode.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('guids___id')[:1] + + return NotificationSubscription.objects.filter(user=self.request.user).annotate( + event_name=Case( + When( + notification_type__name=NotificationType.Type.NODE_FILES_UPDATED.value, + then=Value('files_updated'), + ), + When( + notification_type__name=NotificationType.Type.USER_FILE_UPDATED.value, + then=Value('global_file_updated'), + ), + default=F('notification_type__name'), + output_field=CharField(), + ), + legacy_id=Case( + When( + notification_type__name=NotificationType.Type.NODE_FILES_UPDATED.value, + then=Concat(Subquery(node_subquery), 
Value('_file_updated')), + ), + When( + notification_type__name=NotificationType.Type.USER_FILE_UPDATED.value, + then=Value(f'{user_guid}_global'), + ), + When( + Q(notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.value) & + Q(content_type=provider_ct), + then=Concat(Subquery(provider_subquery), Value('_new_pending_submissions')), + ), + default=F('notification_type__name'), + output_field=CharField(), + ), + ) class AbstractProviderSubscriptionList(SubscriptionList): - def get_default_queryset(self): - user = self.request.user + def get_queryset(self): + provider = AbstractProvider.objects.get(_id=self.kwargs['provider_id']) return NotificationSubscription.objects.filter( - provider___id=self.kwargs['provider_id'], - provider__type=self.provider_class._typedmodels_type, - ).filter( - Q(none=user) | - Q(email_digest=user) | - Q(email_transactional=user), - ).distinct() - + object_id=provider, + provider__type=ContentType.objects.get_for_model(provider.__class__), + user=self.request.user, + ) class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView): view_name = 'notification-subscription-detail' @@ -79,10 +110,63 @@ class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView): def get_object(self): subscription_id = self.kwargs['subscription_id'] + user_guid = self.request.user._id + + provider_ct = ContentType.objects.get(app_label='osf', model='abstractprovider') + node_ct = ContentType.objects.get(app_label='osf', model='abstractnode') + + provider_subquery = AbstractProvider.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('_id')[:1] + + node_subquery = AbstractNode.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('guids___id')[:1] + + guid_id, *event_parts = subscription_id.split('_') + event = '_'.join(event_parts) if event_parts else '' + + subscription_obj = AbstractNode.load(guid_id) or Preprint.load(guid_id) or OSFUser.load(guid_id) + + if event != 'global': + obj_filter = Q( + object_id=getattr(subscription_obj, 'id', None), + content_type=ContentType.objects.get_for_model(subscription_obj.__class__), + notification_type__name__icontains=event, + ) + else: + obj_filter = Q() + try: - obj = NotificationSubscription.objects.get(_id=subscription_id) + obj = NotificationSubscription.objects.annotate( + legacy_id=Case( + When( + notification_type__name=NotificationType.Type.NODE_FILES_UPDATED.value, + content_type=node_ct, + then=Concat(Subquery(node_subquery), Value('_file_updated')), + ), + When( + notification_type__name=NotificationType.Type.USER_FILE_UPDATED.value, + then=Value(f'{user_guid}_global'), + ), + When( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.value, + content_type=provider_ct, + then=Concat(Subquery(provider_subquery), Value('_new_pending_submissions')), + ), + default=Value(f'{user_guid}_global'), + output_field=CharField(), + ), + ).filter(obj_filter) + except ObjectDoesNotExist: raise NotFound + + try: + obj = obj.filter(user=self.request.user).get() + except ObjectDoesNotExist: + raise PermissionDenied + self.check_object_permissions(self.request, obj) return obj @@ -100,33 +184,6 @@ class AbstractProviderSubscriptionDetail(SubscriptionDetail): required_write_scopes = [CoreScopes.SUBSCRIPTIONS_WRITE] provider_class = None - def __init__(self, *args, **kwargs): - assert issubclass(self.provider_class, AbstractProvider), 'Class must be subclass of AbstractProvider' - super().__init__(*args, **kwargs) - - def 
get_object(self): - subscription_id = self.kwargs['subscription_id'] - if self.kwargs.get('provider_id'): - provider = self.provider_class.objects.get(_id=self.kwargs.get('provider_id')) - try: - obj = NotificationSubscription.objects.get( - _id=subscription_id, - provider_id=provider.id, - ) - except ObjectDoesNotExist: - raise NotFound - else: - try: - obj = NotificationSubscription.objects.get( - _id=subscription_id, - provider__type=self.provider_class._typedmodels_type, - ) - except ObjectDoesNotExist: - raise NotFound - self.check_object_permissions(self.request, obj) - return obj - - class CollectionProviderSubscriptionDetail(AbstractProviderSubscriptionDetail): provider_class = CollectionProvider serializer_class = CollectionSubscriptionSerializer diff --git a/api/users/serializers.py b/api/users/serializers.py index f31130d0420..193b2978dc5 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -118,13 +118,6 @@ class UserSerializer(JSONAPISerializer): ), ) - groups = HideIfDisabled( - RelationshipField( - related_view='users:user-groups', - related_view_kwargs={'user_id': '<_id>'}, - ), - ) - registrations = HideIfDisabled( RelationshipField( related_view='users:user-registrations', diff --git a/api/users/urls.py b/api/users/urls.py index 2e7b02ff2e1..3184872707f 100644 --- a/api/users/urls.py +++ b/api/users/urls.py @@ -19,13 +19,11 @@ re_path(r'^(?P\w+)/draft_registrations/$', views.UserDraftRegistrations.as_view(), name=views.UserDraftRegistrations.view_name), re_path(r'^(?P\w+)/institutions/$', views.UserInstitutions.as_view(), name=views.UserInstitutions.view_name), re_path(r'^(?P\w+)/nodes/$', views.UserNodes.as_view(), name=views.UserNodes.view_name), - re_path(r'^(?P\w+)/groups/$', views.UserGroups.as_view(), name=views.UserGroups.view_name), re_path(r'^(?P\w+)/preprints/$', views.UserPreprints.as_view(), name=views.UserPreprints.view_name), re_path(r'^(?P\w+)/draft_preprints/$', views.UserDraftPreprints.as_view(), name=views.UserDraftPreprints.view_name), re_path(r'^(?P\w+)/registrations/$', views.UserRegistrations.as_view(), name=views.UserRegistrations.view_name), re_path(r'^(?P\w+)/settings/$', views.UserSettings.as_view(), name=views.UserSettings.view_name), re_path(r'^(?P\w+)/messages/$', views.UserMessageView.as_view(), name=views.UserMessageView.view_name), - re_path(r'^(?P\w+)/quickfiles/$', views.UserQuickFiles.as_view(), name=views.UserQuickFiles.view_name), re_path(r'^(?P\w+)/relationships/institutions/$', views.UserInstitutionsRelationship.as_view(), name=views.UserInstitutionsRelationship.view_name), re_path(r'^(?P\w+)/settings/emails/$', views.UserEmailsList.as_view(), name=views.UserEmailsList.view_name), re_path(r'^(?P\w+)/settings/emails/(?P\w+)/$', views.UserEmailsDetail.as_view(), name=views.UserEmailsDetail.view_name), diff --git a/api/users/views.py b/api/users/views.py index 6387bcbcea9..8dea51613df 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -12,8 +12,7 @@ from api.addons.views import AddonSettingsMixin from api.base import permissions as base_permissions from api.users.permissions import UserMessagePermissions -from api.base.waffle_decorators import require_flag -from api.base.exceptions import Conflict, UserGone, Gone +from api.base.exceptions import Conflict, UserGone from api.base.filters import ListFilterMixin, PreprintFilterMixin from api.base.parsers import ( JSONAPIRelationshipParser, @@ -41,7 +40,6 @@ from api.nodes.filters import NodesFilterMixin, UserNodesFilterMixin from api.nodes.serializers import 
DraftRegistrationLegacySerializer from api.nodes.utils import NodeOptimizationMixin -from api.osf_groups.serializers import GroupSerializer from api.preprints.serializers import PreprintSerializer, PreprintDraftSerializer from api.registrations import annotations as registration_annotations from api.registrations.serializers import RegistrationSerializer @@ -85,7 +83,6 @@ from framework.utils import throttle_period_expired from framework.sessions.utils import remove_sessions_for_user from framework.exceptions import PermissionsError, HTTPError -from osf.features import OSF_GROUPS from rest_framework import permissions as drf_permissions from rest_framework import generics from rest_framework import status @@ -99,7 +96,6 @@ Preprint, Node, Registration, - OSFGroup, OSFUser, Email, Tag, @@ -360,49 +356,6 @@ def get_queryset(self): ) -class UserGroups(JSONAPIBaseView, generics.ListAPIView, UserMixin, ListFilterMixin): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - ) - required_read_scopes = [CoreScopes.OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.NULL] - - model_class = apps.get_model('osf.OSFGroup') - serializer_class = GroupSerializer - view_category = 'users' - view_name = 'user-groups' - ordering = ('-modified',) - - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - requested_user = self.get_user() - current_user = self.request.user - if current_user.is_anonymous: - return OSFGroup.objects.none() - return requested_user.osf_groups.filter(id__in=current_user.osf_groups.values_list('id', flat=True)) - - # overrides ListAPIView - def get_queryset(self): - return self.get_queryset_from_request() - - -class UserQuickFiles(JSONAPIBaseView, generics.ListAPIView): - view_category = 'users' - view_name = 'user-quickfiles' - - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - ) - - required_read_scopes = [CoreScopes.NULL] - required_write_scopes = [CoreScopes.NULL] - - def get(self, *args, **kwargs): - raise Gone() - - class UserPreprints(JSONAPIBaseView, generics.ListAPIView, UserMixin, PreprintFilterMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_preprints_list). 
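The rewritten subscription views above rebuild the legacy `<guid>_<event>` subscription ids at query time with `Case`/`When`, a correlated `Subquery` against the related object, and `Concat`. A stripped-down sketch of that annotation pattern, using a hypothetical node model with a `guid` column and the literal `'node_files_updated'` standing in for `NotificationType.Type.NODE_FILES_UPDATED.value`; it assumes a configured Django project:

```python
# Sketch of the legacy-id annotation used above. `subscriptions` is a queryset of a model
# with a generic FK (content_type/object_id) and a notification_type FK; `node_model` is
# a hypothetical model exposing an integer pk and a `guid` column.
from django.db.models import Case, CharField, F, IntegerField, OuterRef, Subquery, Value, When
from django.db.models.functions import Cast, Concat


def annotate_legacy_id(subscriptions, node_model):
    # object_id is stored as text, so cast it before matching the node's integer pk.
    node_guid = node_model.objects.filter(
        id=Cast(OuterRef('object_id'), IntegerField()),
    ).values('guid')[:1]

    return subscriptions.annotate(
        legacy_id=Case(
            When(
                notification_type__name='node_files_updated',
                then=Concat(Subquery(node_guid), Value('_file_updated')),
            ),
            default=F('notification_type__name'),
            output_field=CharField(),
        ),
    )
```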
""" diff --git a/api_tests/crossref/views/test_crossref_email_response.py b/api_tests/crossref/views/test_crossref_email_response.py index 2504c2a092e..775a0045c06 100644 --- a/api_tests/crossref/views/test_crossref_email_response.py +++ b/api_tests/crossref/views/test_crossref_email_response.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest import hmac import hashlib @@ -11,6 +10,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCrossRefEmailResponse: def make_mailgun_payload(self, crossref_response): @@ -155,49 +155,47 @@ def test_wrong_request_context_raises_permission_error(self, app, url, error_xml assert response.status_code == 400 - def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml): + def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml, mock_send_grid): assert not preprint.get_identifier_value('doi') - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=error_xml) - app.post(url, context_data) - assert mock_send_mail.called + context_data = self.make_mailgun_payload(crossref_response=error_xml) + app.post(url, context_data) + assert mock_send_grid.called assert not preprint.get_identifier_value('doi') - def test_success_response_sets_doi(self, app, url, preprint, success_xml): + def test_success_response_sets_doi(self, app, url, preprint, success_xml, mock_send_grid): assert not preprint.get_identifier_value('doi') - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=success_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=success_xml) + mock_send_grid.reset_mock() + app.post(url, context_data) preprint.reload() - assert not mock_send_mail.called + assert not mock_send_grid.called assert preprint.get_identifier_value('doi') assert preprint.preprint_doi_created - def test_update_success_response(self, app, preprint, url): + def test_update_success_response(self, app, preprint, url, mock_send_grid): initial_value = 'TempDOIValue' preprint.set_identifier_value(category='doi', value=initial_value) update_xml = self.update_success_xml(preprint) - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=update_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + mock_send_grid.reset_mock() + app.post(url, context_data) - assert not mock_send_mail.called + assert not mock_send_grid.called assert preprint.get_identifier_value(category='doi') != initial_value - def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url): + def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url, mock_send_grid): preprint.set_identifier_value(category='doi', value='test') preprint.preprint_doi_created = timezone.now() preprint.save() update_xml = self.update_success_xml(preprint) pre_created = preprint.preprint_doi_created - with mock.patch('framework.auth.views.mails.send_mail'): - context_data = self.make_mailgun_payload(crossref_response=update_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + app.post(url, context_data) assert preprint.preprint_doi_created == pre_created @@ -214,14 +212,14 @@ def test_success_batch_response(self, 
app, url): for preprint in preprint_list: assert preprint.get_identifier_value('doi') == settings.DOI_FORMAT.format(prefix=provider.doi_prefix, guid=preprint._id) - def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint): + def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint, mock_send_grid): legacy_value = 'IAmALegacyDOI' preprint.set_identifier_value(category='legacy_doi', value=legacy_value) update_xml = self.update_success_xml(preprint) - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=update_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + mock_send_grid.reset_mock() + app.post(url, context_data) - assert not mock_send_mail.called + assert not mock_send_grid.called assert preprint.identifiers.get(category='legacy_doi').deleted diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py index 38b4156e116..0c2dce3501b 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py @@ -165,14 +165,6 @@ def url_contrib(self, project, contrib): return '/{}draft_registrations/{}/contributors/{}/'.format( API_BASE, project._id, contrib._id) - def test_change_contributor_non_admin_osf_group_member_auth(self, project, contrib): - # Overrides TestNodeContributorUpdate - drafts have no group perms - return - - def test_change_contributor_admin_osf_group_permissions(self, project, contrib): - # Overrides TestNodeContributorUpdate - drafts have no group perms - return - class TestDraftRegistrationContributorPartialUpdate(TestNodeContributorPartialUpdate): @pytest.fixture() @@ -259,14 +251,6 @@ def url_user_non_contrib(self, project, user_non_contrib): return '/{}draft_registrations/{}/contributors/{}/'.format( API_BASE, project._id, user_non_contrib._id) - def test_remove_contributor_osf_group_member_read(self): - # Overrides TestNodeContributorDelete - drafts don't have group members - return - - def test_remove_contributor_osf_group_member_admin(self): - # Overrides TestNodeContributorDelete - drafts don't have group members - return - @pytest.mark.django_db class TestDraftBibliographicContributorDetail(): diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index 5c07057ed5d..71fe7450b6d 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -199,20 +199,6 @@ def url_public(self, project_public): def url_private(self, project_private): return f'/{API_BASE}draft_registrations/{project_private._id}/contributors/?send_email=false' - # Overrides TestNodeContributorAdd - def test_adds_contributor_public_project_non_admin_osf_group( - self, app, user, user_two, user_three, - project_public, data_user_three, url_public): - # Draft registrations don't have groups - return - - # Overrides TestNodeContributorAdd - def test_adds_contributor_private_project_osf_group_admin_perms( - self, app, user, user_two, user_three, project_private, - data_user_two, url_private): - # Draft registrations don't have groups - return - class 
TestDraftRegistrationContributorCreateValidation(DraftRegistrationCRUDTestCase, TestNodeContributorCreateValidation): @@ -222,16 +208,16 @@ def create_serializer(self): return DraftRegistrationContributorsCreateSerializer +@pytest.mark.usefixtures('mock_send_grid') class TestDraftContributorCreateEmail(DraftRegistrationCRUDTestCase, TestNodeContributorCreateEmail): @pytest.fixture() def url_project_contribs(self, project_public): # Overrides TestNodeContributorCreateEmail return f'/{API_BASE}draft_registrations/{project_public._id}/contributors/' - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_sends_email( - self, mock_mail, app, user, user_two, - url_project_contribs): + self, app, user, user_two, + url_project_contribs, mock_send_grid): # Overrides TestNodeContributorCreateEmail url = f'{url_project_contribs}?send_email=draft_registration' payload = { @@ -252,7 +238,7 @@ def test_add_contributor_sends_email( res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 # Overrides TestNodeContributorCreateEmail def test_add_contributor_signal_if_default( @@ -278,9 +264,8 @@ def test_add_contributor_signal_if_default( assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' # Overrides TestNodeContributorCreateEmail - @mock.patch('framework.auth.views.mails.send_mail') def test_add_unregistered_contributor_sends_email( - self, mock_mail, app, user, url_project_contribs): + self, mock_send_grid, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' payload = { 'data': { @@ -293,7 +278,7 @@ def test_add_unregistered_contributor_sends_email( } res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 # Overrides TestNodeContributorCreateEmail @mock.patch('website.project.signals.unreg_contributor_added.send') @@ -315,9 +300,8 @@ def test_add_unregistered_contributor_signal_if_default( assert 'draft_registration' == kwargs['email_template'] # Overrides TestNodeContributorCreateEmail - @mock.patch('framework.auth.views.mails.send_mail') def test_add_unregistered_contributor_without_email_no_email( - self, mock_mail, app, user, url_project_contribs): + self, mock_send_grid, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' payload = { 'data': { @@ -332,7 +316,7 @@ def test_add_unregistered_contributor_without_email_no_email( res = app.post_json_api(url, payload, auth=user.auth) assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 class TestDraftContributorBulkCreate(DraftRegistrationCRUDTestCase, TestNodeContributorBulkCreate): diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index 1126af09ad3..d19c6d994d5 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from framework.auth.core import Auth @@ -18,7 +17,7 @@ ) from osf.utils.permissions import READ, WRITE, ADMIN -from website import mails, settings +from website import settings @pytest.fixture(autouse=True) @@ -53,10 +52,6 @@ def 
user_read_contrib(self): def user_non_contrib(self): return AuthUserFactory() - @pytest.fixture() - def group_mem(self): - return AuthUserFactory() - @pytest.fixture() def project(self, user): return ProjectFactory(creator=user) @@ -162,6 +157,7 @@ def test_draft_with_deleted_registered_node_shows_up_in_draft_list( assert data[0]['attributes']['registration_metadata'] == {} +@pytest.mark.usefixtures('mock_send_grid') class TestDraftRegistrationCreateWithNode(AbstractDraftRegistrationTestCase): @pytest.fixture() @@ -319,7 +315,7 @@ def test_read_only_contributor_cannot_create_draft( assert res.status_code == 403 def test_non_authenticated_user_cannot_create_draft( - self, app, user_write_contrib, payload_alt, group, url_draft_registrations + self, app, user_write_contrib, payload_alt, url_draft_registrations ): res = app.post_json_api( url_draft_registrations, @@ -340,11 +336,11 @@ def test_logged_in_non_contributor_cannot_create_draft( ) assert res.status_code == 403 - def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload): - with mock.patch.object(mails, 'send_mail') as mock_send_mail: - app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) + def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload, mock_send_grid): + mock_send_grid.reset_mock() + app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) - assert not mock_send_mail.called + assert not mock_send_grid.called def test_affiliated_institutions_are_copied_from_node_no_institutions(self, app, user, url_draft_registrations, payload): """ @@ -406,6 +402,7 @@ def test_affiliated_institutions_are_copied_from_user(self, app, user, url_draft assert list(draft_registration.affiliated_institutions.all()) == list(user.get_affiliated_institutions()) +@pytest.mark.usefixtures('mock_send_grid') class TestDraftRegistrationCreateWithoutNode(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self): @@ -432,23 +429,21 @@ def test_admin_can_create_draft( assert draft.creator == user assert draft.has_permission(user, ADMIN) is True - def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): + def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload, mock_send_grid): # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor - with mock.patch.object(mails, 'send_mail') as mock_send_mail: - resp = app.post_json_api( - f'{url_draft_registrations}?embed=branched_from&embed=initiator', - payload, - auth=user.auth - ) - assert mock_send_mail.called + app.post_json_api( + f'{url_draft_registrations}?embed=branched_from&embed=initiator', + payload, + auth=user.auth + ) + assert mock_send_grid.called # Python 3.6 does not support mock.call_args.args/kwargs # Instead, mock.call_args[0] is positional args, mock.call_args[1] is kwargs # (note, this is compatible with later versions) - mock_send_kwargs = mock_send_mail.call_args[1] - assert mock_send_kwargs['mail'] == mails.CONTRIBUTOR_ADDED_DRAFT_REGISTRATION - assert mock_send_kwargs['user'] == user - assert mock_send_kwargs['node'] == DraftRegistration.load(resp.json['data']['id']) + mock_send_kwargs = mock_send_grid.call_args[1] + assert mock_send_kwargs['subject'] == 'You have a new registration draft.' 
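From here on, the test changes consistently swap per-test `mock.patch('...mails.send_mail')` blocks for a shared `mock_send_grid` fixture (plus a `start_mock_send_grid` helper for unittest-style classes). The fixture itself lives in `conftest`, which is not part of this diff; the sketch below is a hypothetical standalone equivalent that patches a local stand-in sender, only to show the call/assert shape the tests above rely on:

```python
# Hypothetical sketch: the real mock_send_grid fixture is defined in conftest (not shown
# in this diff). This version patches a local stand-in sender so it runs on its own.
from unittest import mock

import pytest


def send_email(to_addr, subject, **kwargs):
    """Stand-in for the SendGrid-backed sender that the real fixture patches."""
    raise RuntimeError('tests must not send real email')


@pytest.fixture()
def mock_send_grid():
    # __name__ resolves to this test module, so the patch target exists at runtime.
    with mock.patch(f'{__name__}.send_email') as mocked:
        mocked.return_value = True
        yield mocked


def notify(user_email):
    send_email(to_addr=user_email, subject='You have a new registration draft.')


def test_notify_sends_one_email(mock_send_grid):
    notify('user@example.com')
    mock_send_grid.assert_called_once()
    # call_args[1] is the kwargs dict, mirroring the assertions in the tests above.
    assert mock_send_grid.call_args[1]['to_addr'] == 'user@example.com'
```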
+ assert mock_send_kwargs['to_addr'] == user.email def test_create_draft_with_provider( self, app, user, url_draft_registrations, non_default_provider, payload_with_non_default_provider diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 01f7da21a26..58d58d0cf95 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -16,7 +16,7 @@ from api.base.settings.defaults import API_BASE from api_tests import utils as api_utils from framework.auth.core import Auth -from osf.models import NodeLog, QuickFilesNode, Node, FileVersionUserMetadata +from osf.models import NodeLog, Node, FileVersionUserMetadata from osf.utils.permissions import WRITE, READ from osf.utils.workflows import DefaultStates from osf_tests.factories import ( @@ -56,10 +56,6 @@ class TestFileView: def node(self, user): return ProjectFactory(creator=user, comment_level='public') - @pytest.fixture() - def quickfiles_node(self, user): - return QuickFilesNode.objects.get(creator=user) - @pytest.fixture() def file(self, user, node): return api_utils.create_test_file(node, user, create_guid=False) diff --git a/api_tests/institutions/views/test_institution_relationship_nodes.py b/api_tests/institutions/views/test_institution_relationship_nodes.py index 901c2e552ca..c62d760710d 100644 --- a/api_tests/institutions/views/test_institution_relationship_nodes.py +++ b/api_tests/institutions/views/test_institution_relationship_nodes.py @@ -1,5 +1,4 @@ import pytest -from unittest import mock from api.base.settings.defaults import API_BASE from osf_tests.factories import ( @@ -26,6 +25,7 @@ def make_registration_payload(*node_ids): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestInstitutionRelationshipNodes: @pytest.fixture() @@ -373,47 +373,44 @@ def test_add_non_node(self, app, user, institution, url_institution_nodes): assert res.status_code == 404 def test_email_sent_on_affiliation_addition(self, app, user, institution, node_without_institution, - url_institution_nodes): + url_institution_nodes, mock_send_grid): node_without_institution.add_contributor(user, permissions='admin') current_institution = InstitutionFactory() node_without_institution.affiliated_institutions.add(current_institution) - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.post_json_api( - url_institution_nodes, - { - 'data': [ - { - 'type': 'nodes', 'id': node_without_institution._id - } - ] - }, - auth=user.auth - ) - - assert res.status_code == 201 - mocked_send_mail.assert_called_once() - - def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_public, url_institution_nodes): + res = app.post_json_api( + url_institution_nodes, + { + 'data': [ + { + 'type': 'nodes', 'id': node_without_institution._id + } + ] + }, + auth=user.auth + ) + + assert res.status_code == 201 + mock_send_grid.assert_called_once() + + def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_public, url_institution_nodes, mock_send_grid): current_institution = InstitutionFactory() node_public.affiliated_institutions.add(current_institution) - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.delete_json_api( - url_institution_nodes, - { - 'data': [ - { - 'type': 'nodes', 'id': node_public._id - } - ] - }, - auth=admin.auth - ) - - # Assert response is successful - assert res.status_code == 204 - - call_args = mocked_send_mail.call_args[1] - assert 
call_args['user'] == admin - assert node_public == call_args['node'] + res = app.delete_json_api( + url_institution_nodes, + { + 'data': [ + { + 'type': 'nodes', 'id': node_public._id + } + ] + }, + auth=admin.auth + ) + + # Assert response is successful + assert res.status_code == 204 + + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == admin.email diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py new file mode 100644 index 00000000000..e7720e96afa --- /dev/null +++ b/api_tests/mailhog/test_mailhog.py @@ -0,0 +1,83 @@ +import requests +import pytest +from website.mails import send_mail, TEST +from waffle.testutils import override_switch +from osf import features +from website import settings +from osf_tests.factories import ( + fake_email, + AuthUserFactory, +) +from tests.base import ( + capture_signals, + fake +) +from framework import auth +from unittest import mock +from osf.models import OSFUser +from tests.base import ( + OsfTestCase, +) +from website.util import api_url_for +from conftest import start_mock_send_grid + + +@pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') +class TestMailHog: + + def test_mailhog_recived_mail(self, mock_send_grid): + with override_switch(features.ENABLE_MAILHOG, active=True): + mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' + mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' + requests.delete(mailhog_v1) + + send_mail('to_addr@mail.com', TEST, name='Mailhog') + res = requests.get(mailhog_v2).json() + assert res['count'] == 1 + assert res['items'][0]['Content']['Headers']['To'][0] == 'to_addr@mail.com' + assert res['items'][0]['Content']['Headers']['Subject'][0] == 'A test email to Mailhog' + mock_send_grid.assert_called() + requests.delete(mailhog_v1) + + +@pytest.mark.django_db +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) +class TestAuthMailhog(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.auth = self.user.auth + + self.mock_send_grid = start_mock_send_grid(self) + + def test_recived_confirmation(self): + url = api_url_for('register_user') + name, email, password = fake.name(), fake_email(), 'underpressure' + mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' + mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' + requests.delete(mailhog_v1) + + with override_switch(features.ENABLE_MAILHOG, active=True): + with capture_signals() as mock_signals: + self.app.post( + url, + json={ + 'fullName': name, + 'email1': email, + 'email2': email, + 'password': password, + } + ) + res = requests.get(mailhog_v2).json() + + assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} + assert self.mock_send_grid.called + + user = OSFUser.objects.get(username=email) + user_token = list(user.email_verifications.keys())[0] + ideal_link_path = f'/confirm/{user._id}/{user_token}/' + + assert ideal_link_path in res['items'][0]['Content']['Body'] diff --git a/api_tests/nodes/views/test_node_children_list.py b/api_tests/nodes/views/test_node_children_list.py index a6a891a5b8b..c1375ed1cc7 100644 --- a/api_tests/nodes/views/test_node_children_list.py +++ b/api_tests/nodes/views/test_node_children_list.py @@ -8,7 +8,6 @@ from osf_tests.factories import ( NodeFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, AuthUserFactory, PrivateLinkFactory, @@ -108,16 +107,6 @@ def 
test_return_private_node_children_list( assert len(res.json['data']) == 1 assert res.json['data'][0]['id'] == component._id - # test_return_private_node_children_osf_group_member_admin - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, permissions.ADMIN) - res = app.get(private_project_url, auth=group_mem.auth) - assert res.status_code == 200 - # Can view node children that you have implict admin permissions - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == component._id - def test_node_children_list_does_not_include_pointers( self, app, user, component, private_project_url): res = app.get(private_project_url, auth=user.auth) @@ -391,23 +380,6 @@ def test_creates_child(self, app, user, project, child, url): project.reload() assert len(project.nodes) == 0 - # test_creates_child_group_member_read - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.READ) - res = app.post_json_api( - url, child, auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - - project.update_osf_group(group, permissions.WRITE) - res = app.post_json_api( - url, child, auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 201 - # test_creates_child_no_type child = { 'data': { diff --git a/api_tests/nodes/views/test_node_citations.py b/api_tests/nodes/views/test_node_citations.py index 22e877b523e..9e0b0b9e6bf 100644 --- a/api_tests/nodes/views/test_node_citations.py +++ b/api_tests/nodes/views/test_node_citations.py @@ -7,7 +7,6 @@ from osf_tests.factories import ( ProjectFactory, AuthUserFactory, - OSFGroupFactory ) @@ -119,13 +118,6 @@ def test_node_citations( assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - # test_read_group_mem_can_view_private_project_citations - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth) - assert res.status_code == 200 - # test_unauthenticated_can_view_public_project_citations res = app.get(public_url) assert res.status_code == 200 diff --git a/api_tests/nodes/views/test_node_comments_list.py b/api_tests/nodes/views/test_node_comments_list.py index c8e01a62e0b..e8cbf9c0e6b 100644 --- a/api_tests/nodes/views/test_node_comments_list.py +++ b/api_tests/nodes/views/test_node_comments_list.py @@ -11,7 +11,6 @@ from osf_tests.factories import ( ProjectFactory, RegistrationFactory, - OSFGroupFactory, AuthUserFactory, CommentFactory, ) @@ -355,18 +354,6 @@ def test_node_comments( assert res.status_code == 403 assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail - # test_private_node_private_comment_level_osf_group_member_can_comment - project_dict = project_private_comment_private - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_dict['project'].add_osf_group(group, READ) - res = app.post_json_api( - project_dict['url'], - project_dict['payload'], - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 201 - # test_private_node_private_comment_level_logged_out_user_cannot_comment project_dict = project_private_comment_private res = app.post_json_api( @@ -544,18 +531,6 @@ def test_node_comments_disabled( expect_errors=True) assert res.status_code == 501 - # test_private_node_private_comment_level_osf_group_member_can_comment - 
project_dict = project_private_comment_private - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_dict['project'].add_osf_group(group, READ) - res = app.post_json_api( - project_dict['url'], - project_dict['payload'], - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 501 - # test_private_node_with_public_comment_level_admin_can_comment project_dict = project_private_comment_public res = app.post_json_api( diff --git a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py index 1cf6739aa44..f79bdd56f98 100644 --- a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py +++ b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py @@ -3,10 +3,9 @@ from api.base.settings.defaults import API_BASE from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, ) -from osf.utils.permissions import READ, WRITE +from osf.utils.permissions import WRITE @pytest.fixture() def non_contributor(): @@ -21,43 +20,19 @@ def write_contributor(): return AuthUserFactory() @pytest.fixture() -def group_manager(): - user = AuthUserFactory() - user.given_name = 'Dawn' - user.save() - return user - -@pytest.fixture() -def group_member(): - return AuthUserFactory() - -@pytest.fixture() -def group_member_and_contributor(): - return AuthUserFactory() - -@pytest.fixture() -def group(group_manager, group_member, group_member_and_contributor): - group = OSFGroupFactory(creator=group_manager) - group.make_member(group_member) - group.make_member(group_member_and_contributor) - return group - -@pytest.fixture() -def project(group, admin_contributor, write_contributor, group_member_and_contributor): +def project(admin_contributor, write_contributor): project = ProjectFactory( creator=admin_contributor ) project.add_contributor(write_contributor, WRITE) - project.add_contributor(group_member_and_contributor, READ) - project.add_osf_group(group) return project @pytest.mark.django_db class TestNodeContributorsAndGroupMembers: def test_list_and_filter_contributors_and_group_members( - self, app, project, admin_contributor, write_contributor, group_manager, - group_member, group_member_and_contributor, non_contributor): + self, app, project, admin_contributor, write_contributor, + non_contributor): url = f'/{API_BASE}nodes/{project._id}/contributors_and_group_members/' # unauthenticated @@ -72,33 +47,19 @@ def test_list_and_filter_contributors_and_group_members( res = app.get(url, auth=write_contributor.auth, expect_errors=True) assert res.status_code == 200 - # group_member - res = app.get(url, auth=group_member.auth, expect_errors=True) - assert res.status_code == 200 - # assert all contributors and group members appear, no duplicates res = app.get(url, auth=admin_contributor.auth) assert res.status_code == 200 assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 5 + assert len(res.json['data']) == 2 expected = { admin_contributor._id, write_contributor._id, - group_manager._id, - group_member._id, - group_member_and_contributor._id } actual = {node['id'] for node in res.json['data']} assert actual == expected - url = f'/{API_BASE}nodes/{project._id}/contributors_and_group_members/?filter[given_name]={group_manager.given_name}' - res = app.get(url, auth=admin_contributor.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) 
== 1 - assert res.json['data'][0]['id'] == group_manager._id - url = f'/{API_BASE}nodes/{project._id}/contributors_and_group_members/?filter[given_name]=NOT_EVEN_A_NAME' res = app.get(url, auth=admin_contributor.auth) assert res.status_code == 200 diff --git a/api_tests/nodes/views/test_node_contributors_detail.py b/api_tests/nodes/views/test_node_contributors_detail.py index 623c0c15803..57f7e41444f 100644 --- a/api_tests/nodes/views/test_node_contributors_detail.py +++ b/api_tests/nodes/views/test_node_contributors_detail.py @@ -5,7 +5,6 @@ from osf.models import NodeLog from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, ) from tests.utils import assert_latest_log @@ -89,17 +88,6 @@ def test_get_private_node_contributor_detail_contributor_auth(self, app, user, p assert res.status_code == 200 assert res.json['data']['id'] == f'{project_private._id}-{user._id}' - # test_get_private_node_osf_group_member - group_mem = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_mem) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.get( - self.url_private(project_private, user._id), - auth=group_mem.auth - ) - assert res.status_code == 200 - assert res.json['data']['id'] == f'{project_private._id}-{user._id}' - def test_get_private_node_contributor_detail_non_contributor(self, app, user, project_private): non_contrib = AuthUserFactory() res = app.get( @@ -159,22 +147,6 @@ def test_unregistered_contributor_detail_show_up_as_name_associated_with_project assert res.json['data']['embeds']['users']['data']['attributes']['full_name'] == 'Rheisen Dennis' assert res.json['data']['attributes'].get('unregistered_contributor') == 'Nesiehr Sinned' - def test_node_contributor_detail_serializes_contributor_perms(self, app, user, project_public): - user_two = AuthUserFactory() - project_public.add_contributor(user_two, permissions.WRITE) - project_public.save() - - osf_group = OSFGroupFactory(creator=user) - osf_group.make_member(user_two) - project_public.add_osf_group(osf_group, permissions.ADMIN) - - url = self.make_resource_url(project_public._id, user_two._id) - res = app.get(url, auth=user.auth) - # Even though user_two has admin perms through group membership, - # contributor endpoints return contributor permissions - assert res.json['data']['attributes']['permission'] == permissions.WRITE - assert project_public.has_permission(user_two, permissions.ADMIN) is True - def test_detail_includes_index(self, app, user, project_public, url_public): res = app.get(url_public, auth=user.auth) data = res.json['data'] @@ -390,18 +362,6 @@ def test_remove_self_contributor_unique_admin(self, app, user, user_write_contri assert res.status_code == 400 assert user in project.contributors - def test_remove_contributor_osf_group_member_read(self, app, user, user_write_contrib, user_non_contrib, - project, url_user, url_user_write_contrib, url_user_non_contrib): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.READ) - res = app.delete( - url_user_write_contrib, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - def test_can_not_remove_only_bibliographic_contributor(self, app, user, project, user_write_contrib, url_user): project.add_permission( user_write_contrib, @@ -448,20 +408,6 @@ def test_remove_contributor_admin(self, app, user, user_write_contrib, project, assert res.status_code == 204 assert user_write_contrib not in project.contributors - def 
test_remove_contributor_osf_group_member_admin(self, app, user, user_write_contrib, project, - url_user_write_contrib): - with assert_latest_log(NodeLog.CONTRIB_REMOVED, project): - # Disconnect contributor_removed so that we don't check in files - # We can remove this when StoredFileNode is implemented in - # osf-models - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.ADMIN) - with disconnected_from_listeners(contributor_removed): - res = app.delete(url_user_write_contrib, auth=group_mem.auth) - assert res.status_code == 204 - assert user_write_contrib not in project.contributors - def test_remove_self_non_admin(self, app, user_non_contrib, project, url_user_non_contrib): with assert_latest_log(NodeLog.CONTRIB_REMOVED, project): project.add_contributor( diff --git a/api_tests/nodes/views/test_node_contributors_detail_update.py b/api_tests/nodes/views/test_node_contributors_detail_update.py index eed746be90a..0e183c97345 100644 --- a/api_tests/nodes/views/test_node_contributors_detail_update.py +++ b/api_tests/nodes/views/test_node_contributors_detail_update.py @@ -4,7 +4,6 @@ from osf.models import NodeLog from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, ) from rest_framework import exceptions @@ -169,29 +168,6 @@ def test_change_contributor_non_admin_auth(self, app, user, contrib, project, ur assert project.get_permissions(contrib) == [permissions.READ, permissions.WRITE] assert project.get_visible(contrib) - def test_change_contributor_non_admin_osf_group_member_auth(self, app, user, contrib, project, url_contrib): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.WRITE) - res = app.put_json_api( - url_contrib, - { - 'data': { - 'id': contrib._id, - 'type': 'contributors', - 'attributes': { - 'permission': permissions.READ, - 'bibliographic': False - } - } - }, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - assert project.get_permissions(contrib) == [permissions.READ, permissions.WRITE] - assert project.get_visible(contrib) - def test_change_admin_self_without_other_admin(self, app, user, project, url_creator): res = app.put_json_api( url_creator, @@ -250,27 +226,6 @@ def test_change_contributor_correct_id(self, app, user, contrib, project, url_co ) assert res.status_code == 200 - def test_change_contributor_admin_osf_group_permissions(self, app, user, contrib, project, url_contrib): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.ADMIN) - res = app.put_json_api( - url_contrib, - { - 'data': { - 'id': f'{project._id}-{contrib._id}', - 'type': 'contributors', - 'attributes': { - 'permission': permissions.ADMIN, - 'bibliographic': True - } - } - }, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 200 - def test_remove_all_bibliographic_statuses_contributors(self, app, user, contrib, project, url_creator): project.set_visible(contrib, False, save=True) res = app.put_json_api( diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index a6a915aba4d..81910a6ef55 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -9,7 +9,6 @@ from osf_tests.factories import ( fake_email, AuthUserFactory, - OSFGroupFactory, ProjectFactory, UnconfirmedUserFactory, 
UserFactory, @@ -176,18 +175,6 @@ def test_return( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_return_private_contributor_list_logged_in_osf_group_member - res = app.get(url_private, auth=user_two.auth, expect_errors=True) - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.get(url_private, auth=user_two.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == make_contrib_id( - project_private._id, user._id - ) - def test_return_public_contributor_list_logged_out( self, app, user, user_two, project_public, url_public, make_contrib_id ): @@ -646,25 +633,6 @@ def test_adds_contributor_public_project_non_admin( project_public.reload() assert user_three not in project_public.contributors.all() - def test_adds_contributor_public_project_non_admin_osf_group( - self, - app, - user, - user_two, - user_three, - project_public, - data_user_three, - url_public, - ): - group = OSFGroupFactory(creator=user_two) - project_public.add_osf_group(group, permissions.WRITE) - res = app.post_json_api( - url_public, data_user_three, auth=user_two.auth, expect_errors=True - ) - assert res.status_code == 403 - project_public.reload() - assert user_three not in project_public.contributors.all() - def test_adds_contributor_public_project_non_contributor( self, app, user_two, user_three, project_public, data_user_three, url_public ): @@ -693,27 +661,6 @@ def test_adds_contributor_private_project_admin( project_private.reload() assert user_two in project_private.contributors - def test_adds_contributor_private_project_osf_group_admin_perms( - self, - app, - user, - user_two, - user_three, - project_private, - data_user_two, - url_private, - ): - osf_group = OSFGroupFactory(creator=user_three) - project_private.add_osf_group(osf_group, permissions.ADMIN) - res = app.post_json_api(url_private, data_user_two, auth=user_three.auth) - assert res.status_code == 201 - assert res.json['data']['id'] == '{}-{}'.format( - project_private._id, user_two._id - ) - - project_private.reload() - assert user_two in project_private.contributors - def test_adds_contributor_without_bibliographic_private_project_admin( self, app, user, user_two, project_private, url_private ): @@ -1255,15 +1202,15 @@ def test_add_contributor_validation( @pytest.mark.django_db @pytest.mark.enable_bookmark_creation @pytest.mark.enable_enqueue_task +@pytest.mark.usefixtures('mock_send_grid') class TestNodeContributorCreateEmail(NodeCRUDTestCase): @pytest.fixture() def url_project_contribs(self, project_public): return f'/{API_BASE}nodes/{project_public._id}/contributors/' - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_no_email_if_false( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=false' payload = { @@ -1274,11 +1221,10 @@ def test_add_contributor_no_email_if_false( } res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_sends_email( - self, mock_mail, app, user, user_two, url_project_contribs + self, mock_send_grid, app, user, user_two, url_project_contribs ): url = f'{url_project_contribs}?send_email=default' payload = { @@ 
-1293,7 +1239,7 @@ def test_add_contributor_sends_email( res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @mock.patch('website.project.signals.contributor_added.send') def test_add_contributor_signal_if_default( @@ -1334,9 +1280,8 @@ def test_add_contributor_signal_preprint_email_disallowed( == 'preprint is not a valid email preference.' ) - @mock.patch('framework.auth.views.mails.send_mail') def test_add_unregistered_contributor_sends_email( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=default' payload = { @@ -1347,7 +1292,7 @@ def test_add_unregistered_contributor_sends_email( } res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @mock.patch('website.project.signals.unreg_contributor_added.send') def test_add_unregistered_contributor_signal_if_default( @@ -1382,9 +1327,8 @@ def test_add_unregistered_contributor_signal_preprint_email_disallowed( == 'preprint is not a valid email preference.' ) - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_invalid_send_email_param( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=true' payload = { @@ -1398,11 +1342,10 @@ def test_add_contributor_invalid_send_email_param( assert ( res.json['errors'][0]['detail'] == 'true is not a valid email preference.' ) - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') def test_add_unregistered_contributor_without_email_no_email( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=default' payload = { @@ -1418,7 +1361,7 @@ def test_add_unregistered_contributor_without_email_no_email( res = app.post_json_api(url, payload, auth=user.auth) assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.django_db diff --git a/api_tests/nodes/views/test_node_detail.py b/api_tests/nodes/views/test_node_detail.py index b16e27376b7..1aa6966141a 100644 --- a/api_tests/nodes/views/test_node_detail.py +++ b/api_tests/nodes/views/test_node_detail.py @@ -18,7 +18,6 @@ PrivateLinkFactory, PreprintFactory, ForkFactory, - OSFGroupFactory, WithdrawnRegistrationFactory, DraftNodeFactory, ) @@ -138,13 +137,6 @@ def test_return_project_details( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_return_project_where_you_have_osf_group_membership - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.WRITE) - res = app.get(url_private, auth=user_two.auth) - assert res.status_code == 200 - assert project_private.has_permission(user_two, permissions.WRITE) is True - # test_draft_node_not_returned_under_node_detail_endpoint draft_node_url = f'/{API_BASE}nodes/{draft_node._id}/' res = app.get(draft_node_url, auth=user.auth, expect_errors=True) @@ -405,28 +397,6 @@ def test_node_show_correct_children_count(self, app, user, user_two, project_pub res = app.get(node_children_url, auth=user_two.auth) assert len(res.json['data']) == 1 - # 
Explicit Member of OSFGroup can view child count - user_three = AuthUserFactory() - group = OSFGroupFactory(creator=user_three) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 0 - child.add_osf_group(group, permissions.READ) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 1 - res = app.get(node_children_url, auth=user_three.auth) - assert len(res.json['data']) == 1 - - # Implicit admin group member can view child count - child.remove_osf_group(group) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 0 - - project_public.add_osf_group(group, permissions.ADMIN) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 1 - res = app.get(node_children_url, auth=user_three.auth) - assert len(res.json['data']) == 1 - # Grandchildren not shown. Children show one level. grandparent = AuthUserFactory() NodeFactory(parent=child, creator=user) @@ -567,46 +537,6 @@ def test_current_user_permissions(self, app, user, url_public, project_public, u assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False - # Read group member has "read" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - project_public.add_osf_group(osf_group, permissions.READ) - res = app.get(url, auth=group_member.auth) - assert project_public.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is True - - # Write group member has "read" and "write" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - project_public.add_osf_group(osf_group, permissions.WRITE) - res = app.get(url, auth=group_member.auth) - assert res.json['data']['attributes']['current_user_permissions'] == [permissions.WRITE, permissions.READ] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is True - - # Admin group member has "read" and "write" and "admin" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - project_public.add_osf_group(osf_group, permissions.ADMIN) - res = app.get(url, auth=group_member.auth) - assert res.json['data']['attributes']['current_user_permissions'] == [permissions.ADMIN, permissions.WRITE, permissions.READ] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is True - - # make sure 'read' is there for implicit read group members - comp = NodeFactory(parent=project_public, is_public=True) - comp_url = f'/{API_BASE}nodes/{comp._id}/?version=2.11' - res = app.get(comp_url, auth=group_member.auth) - assert project_public.has_permission(user, permissions.ADMIN) - assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False - - # ensure 'read' is still included with older versions - project_public.remove_osf_group(osf_group) - res = app.get(url_public, auth=group_member.auth) - assert 
not project_public.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False - # superusers current permissions are None superuser = AuthUserFactory() superuser.is_superuser = True diff --git a/api_tests/nodes/views/test_node_detail_delete.py b/api_tests/nodes/views/test_node_detail_delete.py index 39185de3772..9c2fe078968 100644 --- a/api_tests/nodes/views/test_node_detail_delete.py +++ b/api_tests/nodes/views/test_node_detail_delete.py @@ -11,7 +11,6 @@ AuthUserFactory, PreprintFactory, IdentifierFactory, - OSFGroupFactory, ) from tests.utils import assert_latest_log from website.views import find_bookmark_collection @@ -69,17 +68,6 @@ def test_deletes_invalid_node( assert res.status_code == 404 assert 'detail' in res.json['errors'][0] - def test_delete_osf_group_improper_permissions( - self, app, user, user_two, project_public, project_private, url_public, url_private, url_fake - ): - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.delete(url_private, auth=user_two.auth, expect_errors=True) - project_private.reload() - assert res.status_code == 403 - assert project_private.is_deleted is False - assert 'detail' in res.json['errors'][0] - def test_deletes_private_node_logged_in_read_only_contributor(self, app, user_two, project_private, url_private): project_private.add_contributor( user_two, diff --git a/api_tests/nodes/views/test_node_detail_update.py b/api_tests/nodes/views/test_node_detail_update.py index 0b43e5cf398..a3b9103d07e 100644 --- a/api_tests/nodes/views/test_node_detail_update.py +++ b/api_tests/nodes/views/test_node_detail_update.py @@ -17,7 +17,6 @@ AuthUserFactory, PreprintFactory, IdentifierFactory, - OSFGroupFactory, ) from tests.base import fake from tests.utils import assert_latest_log, assert_latest_log_not @@ -329,25 +328,6 @@ def test_update_errors( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_update_private_project_group_has_read_perms - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.put_json_api(url_private, { - 'data': { - 'id': project_private._id, - 'type': 'nodes', - 'attributes': { - 'title': title_new, - 'description': description_new, - 'category': category_new, - 'public': False - } - } - }, auth=user_two.auth, expect_errors=True) - assert project_private.has_permission(user_two, permissions.READ) is True - assert res.status_code == 403 - assert 'detail' in res.json['errors'][0] - def test_update_public_project_logged_in( self, app, user, title_new, description_new, category_new, project_public, url_public): @@ -373,32 +353,6 @@ def test_update_public_project_logged_in( assert NodeLog.EDITED_DESCRIPTION in log_actions assert NodeLog.CATEGORY_UPDATED in log_actions - def test_update_public_project_osf_group_member( - self, app, user_two, title_new, description_new, - category_new, project_public, url_public): - osf_group = OSFGroupFactory(creator=user_two) - project_public.add_osf_group(osf_group, permissions.WRITE) - res = app.put_json_api(url_public, { - 'data': { - 'id': project_public._id, - 'type': 'nodes', - 'attributes': { - 'title': title_new, - 'description': description_new, - 'category': category_new, - } - } - }, auth=user_two.auth) - assert res.status_code == 200 - assert res.content_type == 
'application/vnd.api+json' - assert res.json['data']['attributes']['title'] == title_new - assert res.json['data']['attributes']['description'] == description_new - assert res.json['data']['attributes']['category'] == category_new - log_actions = project_public.logs.values_list('action', flat=True) - assert NodeLog.CATEGORY_UPDATED in log_actions - assert NodeLog.EDITED_TITLE in log_actions - assert NodeLog.EDITED_DESCRIPTION in log_actions - def test_cannot_update_a_registration(self, app, user, project_public): registration = RegistrationFactory( project=project_public, creator=user) diff --git a/api_tests/nodes/views/test_node_draft_registration_list.py b/api_tests/nodes/views/test_node_draft_registration_list.py index 5e46b46b4c0..08099337dfd 100644 --- a/api_tests/nodes/views/test_node_draft_registration_list.py +++ b/api_tests/nodes/views/test_node_draft_registration_list.py @@ -11,7 +11,6 @@ RegistrationProviderFactory, AuthUserFactory, CollectionFactory, - OSFGroupFactory, DraftRegistrationFactory, ) from osf.utils import permissions @@ -51,15 +50,7 @@ def user_non_contrib(self): return AuthUserFactory() @pytest.fixture() - def group_mem(self): - return AuthUserFactory() - - @pytest.fixture() - def group(self, group_mem): - return OSFGroupFactory(creator=group_mem) - - @pytest.fixture() - def project_public(self, user, user_admin_contrib, user_write_contrib, user_read_contrib, group, group_mem): + def project_public(self, user, user_admin_contrib, user_write_contrib, user_read_contrib): project_public = ProjectFactory(is_public=True, creator=user) project_public.add_contributor( user_write_contrib, @@ -71,7 +62,6 @@ def project_public(self, user, user_admin_contrib, user_write_contrib, user_read user_admin_contrib, permissions=permissions.ADMIN) project_public.save() - project_public.add_osf_group(group, permissions.ADMIN) project_public.add_tag('hello', Auth(user), save=True) return project_public diff --git a/api_tests/nodes/views/test_node_files_list.py b/api_tests/nodes/views/test_node_files_list.py index ce01ef7e942..c07ce12a217 100644 --- a/api_tests/nodes/views/test_node_files_list.py +++ b/api_tests/nodes/views/test_node_files_list.py @@ -22,10 +22,8 @@ from osf_tests.factories import ( ProjectFactory, AuthUserFactory, - OSFGroupFactory, PrivateLinkFactory ) -from osf.utils.permissions import READ from dateutil.parser import parse as parse_date from website import settings from osf.features import ENABLE_GV @@ -217,16 +215,6 @@ def test_returns_private_files_logged_in_non_contributor(self): assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - def test_returns_private_files_logged_in_osf_group_member(self): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - self.project.add_osf_group(group, READ) - res = self.app.get( - self.private_url, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 200 - def test_returns_addon_folders(self): user_auth = Auth(self.user) res = self.app.get(self.private_url, auth=self.user.auth) @@ -541,18 +529,6 @@ def test_returns_private_files_logged_in_non_contributor(self): assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - @responses.activate - def test_returns_private_files_logged_in_osf_group_member(self): - self.configure_addon(self.project) - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - self.project.add_osf_group(group, READ) - with self.fake_gv.run_fake(): - res = self.app.get( - self.private_url, auth=group_mem.auth, 
expect_errors=True - ) - assert res.status_code == 200 - class TestNodeFilesListFiltering(ApiTestCase): diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index 24f5f50f924..8fc9f9eb35b 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -6,13 +6,11 @@ from osf_tests.factories import ( NodeFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, AuthUserFactory, ForkFactory ) from rest_framework import exceptions -from website import mails from osf.utils import permissions from api.nodes.serializers import NodeForksSerializer @@ -163,19 +161,6 @@ def test_authenticated_contributor_can_access_private_node_forks_list( forked_from = data['embeds']['forked_from']['data'] assert forked_from['id'] == private_project._id - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, permissions.READ) - private_fork.add_osf_group(group, permissions.READ) - res = app.get( - private_project_url, - auth=group_mem.auth) - assert res.status_code == 200 - assert len(res.json['data']) == 1 - data = res.json['data'][0] - assert data['attributes']['title'] == 'Fork of ' + \ - private_project.title - assert data['id'] == private_fork._id def test_node_forks_list_errors(self, app, private_project_url): @@ -218,6 +203,7 @@ def test_forks_list_does_not_show_registrations_of_forks( @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestNodeForkCreate: @pytest.fixture() @@ -345,15 +331,6 @@ def test_can_fork_private_node_logged_in_contributor( forked_from = data['embeds']['forked_from']['data'] assert forked_from['id'] == private_project._id - # test_group_member_read_can_create_fork_of_private_node - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, permissions.READ) - res = app.post_json_api( - private_project_url, - fork_data, auth=user.auth) - assert res.status_code == 201 - def test_fork_private_components_no_access( self, app, user_two, public_project, fork_data, public_project_url): @@ -442,36 +419,26 @@ def test_read_only_contributor_can_fork_private_registration( def test_send_email_success( self, app, user, public_project_url, - fork_data_with_title, public_project): - - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: - res = app.post_json_api( - public_project_url, - fork_data_with_title, - auth=user.auth) - assert res.status_code == 201 - assert res.json['data']['id'] == public_project.forks.first()._id - mock_send_mail.assert_called_with( - user.email, - mails.FORK_COMPLETED, - title=public_project.title, - guid=res.json['data']['id'], - can_change_preferences=False) + fork_data_with_title, public_project, mock_send_grid): + + res = app.post_json_api( + public_project_url, + fork_data_with_title, + auth=user.auth) + assert res.status_code == 201 + assert res.json['data']['id'] == public_project.forks.first()._id + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == user.email + assert call_args['subject'] == 'Your fork has completed' def test_send_email_failed( self, app, user, public_project_url, - fork_data_with_title, public_project): + fork_data_with_title, public_project, mock_send_grid): with mock.patch.object(NodeForksSerializer, 'save', side_effect=Exception()): - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: - with pytest.raises(Exception): - 
app.post_json_api( - public_project_url, - fork_data_with_title, - auth=user.auth) - mock_send_mail.assert_called_with( - user.email, - mails.FORK_FAILED, - title=public_project.title, - guid=public_project._id, - can_change_preferences=False) + with pytest.raises(Exception): + app.post_json_api( + public_project_url, + fork_data_with_title, + auth=user.auth) + assert mock_send_grid.called diff --git a/api_tests/nodes/views/test_node_groups.py b/api_tests/nodes/views/test_node_groups.py deleted file mode 100644 index c1aa3a3e427..00000000000 --- a/api_tests/nodes/views/test_node_groups.py +++ /dev/null @@ -1,454 +0,0 @@ -import pytest -from guardian.shortcuts import get_perms -from waffle.testutils import override_flag - -from api.base.settings.defaults import API_BASE -from framework.auth.core import Auth -from osf.utils import permissions -from osf_tests.factories import ( - ProjectFactory, - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def write_contrib(): - return AuthUserFactory() - -@pytest.fixture() -def read_contrib(): - return AuthUserFactory() - -@pytest.fixture() -def non_contrib(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(member, manager): - group = OSFGroupFactory(creator=manager, name='Platform Team') - group.make_member(member, auth=Auth(manager)) - return group - -@pytest.fixture() -def private_project(write_contrib, read_contrib): - project = ProjectFactory(is_public=False) - project.add_contributor(read_contrib, permissions=permissions.READ) - project.add_contributor(write_contrib, permissions=permissions.WRITE, save=True) - return project - -@pytest.fixture() -def public_project(write_contrib, read_contrib): - project = ProjectFactory(is_public=True) - project.add_contributor(read_contrib, permissions=permissions.READ) - project.add_contributor(write_contrib, permissions=permissions.WRITE, save=True) - return project - -@pytest.fixture() -def public_url(public_project): - return f'/{API_BASE}nodes/{public_project._id}/groups/' - -@pytest.fixture() -def private_url(private_project): - return f'/{API_BASE}nodes/{private_project._id}/groups/' - -@pytest.fixture() -def public_detail_url(public_url, osf_group): - return f'{public_url}{osf_group._id}/' - -@pytest.fixture() -def make_node_group_payload(): - def payload(attributes, relationships=None): - payload_data = { - 'data': { - 'type': 'node-groups', - 'attributes': attributes, - } - } - if relationships: - payload_data['data']['relationships'] = relationships - - return payload_data - return payload - - -@pytest.mark.django_db -class TestNodeGroupsList: - @pytest.fixture() - def make_group_id(self): - def contrib_id(node, group): - return f'{node._id}-{group._id}' - return contrib_id - - def test_return(self, app, non_contrib, osf_group, member, manager, public_project, private_project, public_url, private_url, make_group_id): - with override_flag(OSF_GROUPS, active=True): - public_project.add_osf_group(osf_group, permissions.WRITE) - - # public url logged out - res = app.get(public_url) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(public_project, osf_group) in ids - assert resp_json[0]['attributes']['permission'] == permissions.WRITE - - # private project logged in - private_project.add_osf_group(osf_group, permissions.READ) - res = app.get(private_url, 
auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert resp_json[0]['attributes']['permission'] == permissions.READ - - # private project logged out - res = app.get(private_url, expect_errors=True) - assert res.status_code == 401 - - # private project non_contrib - res = app.get(private_url, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # private project group_member - res = app.get(private_url, auth=member.auth, expect_errors=True) - assert res.status_code == 200 - - # private project group_manager - res = app.get(private_url, auth=member.auth, expect_errors=True) - assert res.status_code == 200 - - def test_filter_groups(self, app, osf_group, private_project, manager, private_url, make_group_id): - with override_flag(OSF_GROUPS, active=True): - read_group = OSFGroupFactory(creator=manager, name='house') - write_group = OSFGroupFactory(creator=manager, name='doghouse') - private_project.add_osf_group(read_group, permissions.READ) - private_project.add_osf_group(write_group, permissions.WRITE) - private_project.add_osf_group(osf_group, permissions.ADMIN) - - # test filter on permission - url = private_url + '?filter[permission]=admin' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) not in ids - assert make_group_id(private_project, read_group) not in ids - - url = private_url + '?filter[permission]=write' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) in ids - assert make_group_id(private_project, read_group) not in ids - - url = private_url + '?filter[permission]=read' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) in ids - assert make_group_id(private_project, read_group) in ids - - # test_filter_on_invalid_permission - url = private_url + '?filter[permission]=bad_perm' - res = app.get(url, auth=private_project.creator.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a filterable permission.' 
- - url = private_url + '?filter[name]=Plat' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) not in ids - assert make_group_id(private_project, read_group) not in ids - - url = private_url + '?filter[name]=house' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) not in ids - assert make_group_id(private_project, write_group) in ids - assert make_group_id(private_project, read_group) in ids - - -@pytest.mark.django_db -class TestNodeGroupCreate: - - def test_create_node_groups(self, app, osf_group, public_url, non_contrib, member, manager, - public_project, write_contrib, make_node_group_payload): - with override_flag(OSF_GROUPS, active=True): - attributes = {'permission': permissions.WRITE} - relationships = { - 'groups': { - 'data': { - 'type': 'groups', - 'id': osf_group._id, - } - } - } - payload = make_node_group_payload(attributes=attributes, relationships=relationships) - - # test add group noncontrib fails - res = app.post_json_api(public_url, payload, auth=non_contrib, expect_errors=True) - assert res.status_code == 401 - - # add group with write permissions fails - res = app.post_json_api(public_url, payload, auth=write_contrib, expect_errors=True) - assert res.status_code == 401 - - # add group with admin on node but not manager in group - res = app.post_json_api(public_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 403 - - # create group with admin permissions on node and manager permissions in group - public_project.add_contributor(manager, permissions=permissions.ADMIN, auth=Auth(public_project.creator), save=True) - - # test_perm_not_specified - given write by default - relationship_only = make_node_group_payload(attributes={}, relationships=relationships) - res = app.post_json_api(public_url, relationship_only, auth=manager.auth) - assert res.status_code == 201 - assert res.json['data']['attributes']['permission'] == permissions.WRITE - assert osf_group._id in res.json['data']['relationships']['groups']['links']['related']['href'] - - public_project.remove_osf_group(osf_group) - - # test_relationship_not_specified - attributes_only = make_node_group_payload(attributes=attributes) - res = app.post_json_api(public_url, attributes_only, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group relationship must be specified.' 
- - # test_group_is_invalid - relationships = { - 'groups': { - 'data': { - 'type': 'groups', - 'id': '12345', - } - } - } - invalid_group = make_node_group_payload(attributes=attributes, relationships=relationships) - res = app.post_json_api(public_url, invalid_group, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == 'Group {} is invalid.'.format('12345') - - # test_admin_perms - res = app.post_json_api(public_url, payload, auth=manager.auth) - assert public_project in osf_group.nodes - assert public_project.has_permission(member, permissions.WRITE) - assert res.json['data']['attributes']['permission'] == permissions.WRITE - assert osf_group._id in res.json['data']['relationships']['groups']['links']['related']['href'] - - # test creating group a second time fails - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'The group {} has already been added to the node {}'.format( - osf_group._id, public_project._id - ) - - # test incorrect permission string - public_project.remove_osf_group(osf_group) - payload['data']['attributes']['permission'] = 'not a real perm' - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'not a real perm is not a valid permission.' - - # test_incorrect_type - payload['data']['type'] = 'incorrect_type' - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test not a real group - payload['data']['type'] = 'node-groups' - payload['data']['relationships']['groups']['data']['id'] = 'not_a_real_group_id' - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - -@pytest.mark.django_db -class TestNodeGroupDetail: - - def test_node_group_detail(self, app, public_detail_url, osf_group, public_project): - with override_flag(OSF_GROUPS, active=True): - # res for group not attached to node raised permissions error - res = app.get(public_detail_url, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'Group {osf_group._id} does not have permissions to node {public_project._id}.' 
- - public_project.add_osf_group(osf_group, permissions.WRITE) - - # test attributes - res = app.get(public_detail_url) - attributes = res.json['data']['attributes'] - assert attributes['date_created'] == osf_group.created.replace(tzinfo=None).isoformat() - assert attributes['date_modified'] == osf_group.modified.replace(tzinfo=None).isoformat() - assert attributes['name'] == osf_group.name - assert attributes['permission'] == permissions.WRITE - - # test relationships - relationships = res.json['data']['relationships'] - assert list(relationships.keys()) == ['groups'] - assert osf_group._id in relationships['groups']['links']['related']['href'] - - # get group that does not exist - res = app.get(public_detail_url.replace(osf_group._id, 'hellonotarealroute'), expect_errors=True) - assert res.status_code == 404 - - def test_node_group_detail_perms(self, app, non_contrib, osf_group, member, public_project, private_project, public_detail_url, private_url): - with override_flag(OSF_GROUPS, active=True): - public_project.add_osf_group(osf_group, permissions.READ) - private_project.add_osf_group(osf_group, permissions.WRITE) - private_detail_url = private_url + osf_group._id + '/' - - # nonauth - res = app.get(private_detail_url, expect_errors=True) - assert res.status_code == 401 - - res = app.get(public_detail_url) - assert res.status_code == 200 - - # noncontrib - res = app.get(private_detail_url, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - res = app.get(public_detail_url, auth=non_contrib.auth) - assert res.status_code == 200 - - # member - res = app.get(private_detail_url, auth=member.auth) - assert res.status_code == 200 - - res = app.get(public_detail_url, auth=member.auth) - assert res.status_code == 200 - - -@pytest.mark.django_db -class TestNodeGroupUpdate: - - def test_update_permission(self, app, public_detail_url, osf_group, write_contrib, non_contrib, - public_project, make_node_group_payload): - with override_flag(OSF_GROUPS, active=True): - attributes = {'permission': permissions.WRITE} - payload = make_node_group_payload(attributes=attributes) - - # group has not been added to the node - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 404 - - public_project.add_osf_group(osf_group, permissions.READ) - - # test id not present in request - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 400 - - # test passing invalid group_id to update - payload['data']['id'] = 'nope' - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 409 - - payload['data']['id'] = public_project._id + '-' + osf_group._id - - # test update not logged in fails - res = app.patch_json_api(public_detail_url, payload, expect_errors=True) - assert res.status_code == 401 - - # test update noncontrib in fails - res = app.patch_json_api(public_detail_url, payload, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # test update as node write contrib fails - res = app.patch_json_api(public_detail_url, payload, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # test update as node admin - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth) - res_json = res.json['data'] - assert res.status_code == 200 - assert not 
osf_group.is_member(public_project.creator.auth) - assert res_json['attributes']['permission'] == permissions.WRITE - assert permissions.WRITE_NODE in get_perms(osf_group.member_group, public_project) - - # test update invalid perm - payload['data']['attributes']['permission'] = 'bad_perm' - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a valid permission.' - - # test update no perm specified, perms unchanged - payload['data']['attributes'] = {} - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 200 - assert res_json['attributes']['permission'] == permissions.WRITE - - -@pytest.mark.django_db -class TestNodeGroupDelete: - - def test_delete_group(self, app, public_detail_url, public_project, osf_group, member, manager, non_contrib, write_contrib): - with override_flag(OSF_GROUPS, active=True): - public_project.add_contributor(manager, permissions=permissions.ADMIN) - payload = { - 'data': [ - {'type': 'node-groups', 'id': f'{public_project._id}-{osf_group._id}'} - ] - } - # group has not been added to the node - res = app.delete_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 404 - - public_project.add_osf_group(osf_group, permissions.WRITE) - - # test member with write permission cannot remove group - res = app.delete_json_api(public_detail_url, payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # not logged in user cannot remove group - res = app.delete_json_api(public_detail_url, payload, expect_errors=True) - assert res.status_code == 401 - - # non contributor cannot remove group - res = app.delete_json_api(public_detail_url, payload, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # write contributor cannot remove group - res = app.delete_json_api(public_detail_url, payload, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager on group can remove group - res = app.delete_json_api(public_detail_url, payload, auth=manager.auth) - assert res.status_code == 204 - assert osf_group not in public_project.osf_groups - - # test member with admin permissions can remove group - public_project.add_osf_group(osf_group, permissions.ADMIN) - res = app.delete_json_api(public_detail_url, payload, auth=member.auth) - assert res.status_code == 204 - assert osf_group not in public_project.osf_groups - - second_group = OSFGroupFactory(creator=non_contrib) - second_group.make_member(member) - public_project.add_osf_group(second_group, permissions.WRITE) - - # test member with write cannot remove group - second_payload = { - 'data': [ - {'type': 'node-groups', 'id': f'{public_project._id}-{second_group._id}'} - ] - } - second_url = f'/{API_BASE}nodes/{public_project._id}/groups/{second_group._id}/' - res = app.delete_json_api(second_url, second_payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager can remove the group (even though they are not an admin contributor) - res = app.delete_json_api(second_url, second_payload, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 204 - assert second_group not in public_project.osf_groups diff --git a/api_tests/nodes/views/test_node_implicit_contributors_list.py 
b/api_tests/nodes/views/test_node_implicit_contributors_list.py index c27591a2e44..53b72df0366 100644 --- a/api_tests/nodes/views/test_node_implicit_contributors_list.py +++ b/api_tests/nodes/views/test_node_implicit_contributors_list.py @@ -3,11 +3,9 @@ from api.base.settings.defaults import API_BASE from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, NodeFactory ) -from osf.utils.permissions import READ @pytest.fixture() @@ -59,15 +57,3 @@ def test_list_and_filter_implicit_contributors(self, app, component, admin_contr assert res.status_code == 200 assert res.content_type == 'application/vnd.api+json' assert len(res.json['data']) == 0 - - def test_osf_group_members_can_view_implicit_contributors(self, app, component, admin_contributor, implicit_contributor): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - component.add_osf_group(group, READ) - - url = f'/{API_BASE}nodes/{component._id}/implicit_contributors/' - res = app.get(url, auth=group_mem.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == implicit_contributor._id diff --git a/api_tests/nodes/views/test_node_institutions_list.py b/api_tests/nodes/views/test_node_institutions_list.py index 3ad46e2a6b3..0ddf17b0355 100644 --- a/api_tests/nodes/views/test_node_institutions_list.py +++ b/api_tests/nodes/views/test_node_institutions_list.py @@ -1,7 +1,6 @@ import pytest -from osf_tests.factories import InstitutionFactory, NodeFactory, AuthUserFactory, OSFGroupFactory -from osf.utils.permissions import READ +from osf_tests.factories import InstitutionFactory, NodeFactory, AuthUserFactory from api.base.settings.defaults import API_BASE @@ -59,13 +58,6 @@ def test_node_institution_detail( assert res.status_code == 200 assert len(res.json['data']) == 0 - # test_osf_group_member_can_view_node_institutions - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_one.add_osf_group(group, READ) - res = app.get(node_one_url) - assert res.status_code == 200 - # test_non_contrib node_one.is_public = False node_one.save() diff --git a/api_tests/nodes/views/test_node_linked_nodes.py b/api_tests/nodes/views/test_node_linked_nodes.py index a4b4662706a..a09a0c632c7 100644 --- a/api_tests/nodes/views/test_node_linked_nodes.py +++ b/api_tests/nodes/views/test_node_linked_nodes.py @@ -4,11 +4,9 @@ from framework.auth.core import Auth from osf_tests.factories import ( NodeFactory, - OSFGroupFactory, AuthUserFactory, NodeRelationFactory, ) -from osf.utils.permissions import WRITE, READ from website.project.signals import contributor_removed from api_tests.utils import disconnected_from_listeners @@ -112,13 +110,6 @@ def test_get_relationship_linked_nodes( res = app.get(url_private, expect_errors=True) assert res.status_code == 401 - # test_get_private_relationship_linked_nodes_read_group_mem - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_linking_private.add_osf_group(group, READ) - res = app.get(url_private, auth=group_mem.auth) - assert res.status_code == 200 - def test_post_contributing_node( self, app, user, node_contrib, node_private, make_payload, url_private): @@ -181,26 +172,6 @@ def test_post_private_node( assert node_other._id not in ids assert node_private._id in ids - # test_group_member_can_post_with_write - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - 
node_linking_private.add_osf_group(group, READ) - res = app.post_json_api( - url_private, - make_payload([node_other._id]), - auth=group_mem.auth, expect_errors=True - ) - assert res.status_code == 403 - - node_linking_private.update_osf_group(group, WRITE) - node_other.add_osf_group(group, WRITE) - res = app.post_json_api( - url_private, - make_payload([node_other._id]), - auth=group_mem.auth, expect_errors=True - ) - assert res.status_code == 201 - def test_post_mixed_nodes( self, app, user, node_private, node_other, node_contrib, make_payload, url_private): diff --git a/api_tests/nodes/views/test_node_linked_registration_create.py b/api_tests/nodes/views/test_node_linked_registration_create.py index 0abe55458b8..070f7a8c4cd 100644 --- a/api_tests/nodes/views/test_node_linked_registration_create.py +++ b/api_tests/nodes/views/test_node_linked_registration_create.py @@ -2,12 +2,7 @@ from api.base.settings.defaults import API_BASE from framework.auth.core import Auth -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, - RegistrationFactory, - NodeRelationFactory, -) +from osf_tests.factories import RegistrationFactory, NodeRelationFactory from osf.utils.permissions import READ from rest_framework import exceptions from .utils import LinkedRegistrationsTestCase @@ -116,25 +111,6 @@ def test_non_contributor_cannot_create_linked_registrations_relationship( assert res.status_code == 403 assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail - def test_read_osf_group_mem_cannot_create_linked_registrations_relationship( - self, - app, - user_non_contrib, - node_private, - ): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_private.add_osf_group(group, READ) - registration = RegistrationFactory(is_public=True) - res = self.make_request( - app, - node_id=node_private._id, - reg_id=registration._id, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - def test_unauthenticated_user_cannot_create_linked_registrations_relationship( self, app, diff --git a/api_tests/nodes/views/test_node_linked_registrations.py b/api_tests/nodes/views/test_node_linked_registrations.py index 1c900bade1b..b246d2ef5cf 100644 --- a/api_tests/nodes/views/test_node_linked_registrations.py +++ b/api_tests/nodes/views/test_node_linked_registrations.py @@ -3,11 +3,6 @@ from api.base.settings.defaults import API_BASE from osf.models import Outcome, Identifier, OutcomeArtifact from osf.utils.outcomes import ArtifactTypes -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.utils.permissions import READ from rest_framework import exceptions from .utils import LinkedRegistrationsTestCase @@ -130,28 +125,6 @@ def test_private_node_unauthenticated_user_cannot_view_linked_registrations(self assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - def test_osf_group_member_read_can_view_linked_reg( - self, - app, - user_admin_contrib, - user_write_contrib, - user_read_contrib, - user_non_contrib, - registration, - node_public, - node_private - ): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_private.add_osf_group(group, READ) - res = self.make_request( - app, - node_id=node_private._id, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 200 - @pytest.mark.django_db class TestNodeLinkedRegistrationsRelationshipRetrieve(LinkedRegistrationsTestCase): @@ -263,22 +236,3 @@ 
def test_private_node_unauthenticated_user_cannot_view_linked_registrations_rela res = self.make_request(app, node_id=node_private._id, expect_errors=True) assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - - def test_osf_group_member_can_view_linked_registration_relationship( - self, - app, - registration, - node_private, - node_public, - user_non_contrib - ): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_private.add_osf_group(group, READ) - res = self.make_request( - app, - node_id=node_private._id, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 200 diff --git a/api_tests/nodes/views/test_node_links_detail.py b/api_tests/nodes/views/test_node_links_detail.py index 80ad13cb755..0ad22bd6a78 100644 --- a/api_tests/nodes/views/test_node_links_detail.py +++ b/api_tests/nodes/views/test_node_links_detail.py @@ -5,11 +5,9 @@ from osf.models import NodeLog from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, RegistrationFactory, AuthUserFactory, ) -from osf.utils.permissions import WRITE, READ from rest_framework import exceptions from tests.utils import assert_latest_log @@ -109,13 +107,6 @@ def test_node_link_detail( assert 'errors' in target_node assert target_node['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail - # test_returns_private_node_pointer_detail_logged_in_group_mem - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 200 - # test_self_link_points_to_node_link_detail_url res = app.get(public_url, auth=user.auth) assert res.status_code == 200 @@ -298,17 +289,6 @@ def test_deletes_private_node_pointer_logged_in_non_contrib( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - def test_deletes_private_node_pointer_logged_in_read_group_mem( - self, app, user_two, private_url, private_project): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.delete(private_url, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - private_project.update_osf_group(group, WRITE) - res = app.delete(private_url, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 204 - def test_return_deleted_public_node_pointer( self, app, user, public_project, public_url): with assert_latest_log(NodeLog.POINTER_REMOVED, public_project): diff --git a/api_tests/nodes/views/test_node_links_list.py b/api_tests/nodes/views/test_node_links_list.py index 136778d6895..a244dc369db 100644 --- a/api_tests/nodes/views/test_node_links_list.py +++ b/api_tests/nodes/views/test_node_links_list.py @@ -6,10 +6,8 @@ from osf_tests.factories import ( ProjectFactory, RegistrationFactory, - OSFGroupFactory, AuthUserFactory ) -from osf.utils.permissions import WRITE, READ from rest_framework import exceptions from tests.utils import assert_latest_log @@ -103,16 +101,6 @@ def test_non_mutational_node_links_list_tests( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_osf_group_member_read_can_view - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get( - private_url, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 200 - # 
test_node_links_bad_version url = f'{public_url}?version=2.1' res = app.get(url, auth=user.auth, expect_errors=True) @@ -396,14 +384,6 @@ def test_creates_public_node_pointer_logged_in( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - public_project.add_osf_group(group, READ) - res = app.post_json_api( - public_url, public_payload, - auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - res = app.post_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 201 assert res.content_type == 'application/vnd.api+json' @@ -420,16 +400,6 @@ def test_creates_private_node_pointer_logged_out( assert res.status_code == 401 assert 'detail' in res.json['errors'][0] - def test_creates_private_node_pointer_group_member( - self, app, private_project, private_pointer_project, private_url, make_payload): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, WRITE) - private_payload = make_payload(id=private_pointer_project._id) - res = app.post_json_api( - private_url, private_payload, auth=group_mem.auth) - assert res.status_code == 201 - def test_creates_private_node_pointer_logged_in_contributor( self, app, user, private_pointer_project, private_url, make_payload): private_payload = make_payload(id=private_pointer_project._id) diff --git a/api_tests/nodes/views/test_node_list.py b/api_tests/nodes/views/test_node_list.py index f71862ca8b0..15398613ea3 100644 --- a/api_tests/nodes/views/test_node_list.py +++ b/api_tests/nodes/views/test_node_list.py @@ -22,7 +22,6 @@ PreprintFactory, InstitutionFactory, RegionFactory, - OSFGroupFactory, DraftNodeFactory, ) from addons.osfstorage.settings import DEFAULT_REGION_ID @@ -128,14 +127,6 @@ def test_return( assert private_project._id not in ids assert draft_node._id not in ids - # test_returns_nodes_through_which_you_have_perms_through_osf_groups - group = OSFGroupFactory(creator=user) - another_project = ProjectFactory() - another_project.add_osf_group(group, permissions.READ) - res = app.get(url, auth=user.auth) - ids = [each['id'] for each in res.json['data']] - assert another_project._id in ids - def test_node_list_does_not_returns_registrations( self, app, user, public_project, url): registration = RegistrationFactory( @@ -220,13 +211,6 @@ def test_default_node_permission_queryset(self, app, url, private_project, user) ProjectFactory(is_public=True) assert default_node_permission_queryset(user_2, Node).count() == 2 - # Node read group member - project_3 = ProjectFactory(is_public=False) - assert default_node_permission_queryset(user_2, Node).count() == 2 - group = OSFGroupFactory(creator=user_2) - project_3.add_osf_group(group, permissions.READ) - assert default_node_permission_queryset(user_2, Node).count() == 3 - def test_current_user_permissions(self, app, user, url, public_project, non_contrib): # in most recent API version, read isn't implicit for public nodes url_public = url + '?version=2.11' @@ -275,53 +259,6 @@ def test_current_user_permissions(self, app, user, url, public_project, non_cont res = app.get(url_public, auth=superuser.auth) assert permissions.READ not in res.json['data'][0]['attributes']['current_user_permissions'] - def test_current_user_permissions_group_member(self, app, user, url, public_project): - # in most recent API version, read isn't implicit for public nodes - url_public = url + '?version=2.11' - - # Read group member has 
"read" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - public_project.add_osf_group(osf_group, permissions.READ) - res = app.get(url_public, auth=group_member.auth) - assert public_project.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data'][0]['attributes']['current_user_permissions'] - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is True - - # Write group member has "read" and "write" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - public_project.add_osf_group(osf_group, permissions.WRITE) - res = app.get(url_public, auth=group_member.auth) - assert res.json['data'][0]['attributes']['current_user_permissions'] == [permissions.WRITE, permissions.READ] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is True - - # Admin group member has "read" and "write" and "admin" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - public_project.add_osf_group(osf_group, permissions.ADMIN) - res = app.get(url_public, auth=group_member.auth) - assert res.json['data'][0]['attributes']['current_user_permissions'] == [permissions.ADMIN, permissions.WRITE, permissions.READ] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is True - - # make sure 'read' is there for implicit read group members - NodeFactory(parent=public_project, is_public=True) - res = app.get(url_public, auth=group_member.auth) - assert public_project.has_permission(user, permissions.ADMIN) - assert permissions.READ in res.json['data'][0]['attributes']['current_user_permissions'] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is False - - # ensure 'read' is still included with older versions - public_project.remove_osf_group(osf_group) - res = app.get(url, auth=group_member.auth) - assert not public_project.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data'][0]['attributes']['current_user_permissions'] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is False - @pytest.mark.django_db @pytest.mark.enable_bookmark_creation @@ -1637,35 +1574,6 @@ def test_create_component_inherit_contributors( new_component.contributors ) == len(parent_project.contributors) - def test_create_component_inherit_groups( - self, app, user_one, user_two, title, category): - parent_project = ProjectFactory(creator=user_one) - group = OSFGroupFactory(creator=user_one) - second_group = OSFGroupFactory() - third_group = OSFGroupFactory(creator=user_two) - third_group.make_member(user_one) - parent_project.add_osf_group(group, permissions.WRITE) - parent_project.add_osf_group(second_group, permissions.WRITE) - url = '/{}nodes/{}/children/?inherit_contributors=true'.format( - API_BASE, parent_project._id) - component_data = { - 'data': { - 'type': 'nodes', - 'attributes': { - 'title': title, - 'category': category, - } - } - } - res = app.post_json_api(url, component_data, auth=user_one.auth) - assert 
res.status_code == 201 - json_data = res.json['data'] - new_component_id = json_data['id'] - new_component = AbstractNode.load(new_component_id) - assert group in new_component.osf_groups - assert second_group not in new_component.osf_groups - assert third_group not in new_component.osf_groups - def test_create_component_with_tags(self, app, user_one, title, category): parent_project = ProjectFactory(creator=user_one) url = f'/{API_BASE}nodes/{parent_project._id}/children/' @@ -1691,42 +1599,6 @@ def test_create_component_with_tags(self, app, user_one, title, category): assert tag1.name == 'test tag 1' assert tag2.name == 'test tag 2' - def test_create_component_inherit_contributors_with_unregistered_contributor( - self, app, user_one, title, category): - parent_project = ProjectFactory(creator=user_one) - parent_project.add_unregistered_contributor( - fullname='far', email='foo@bar.baz', - permissions=permissions.READ, - auth=Auth(user=user_one), save=True) - osf_group = OSFGroupFactory(creator=user_one) - osf_group.add_unregistered_member(fullname='far', email='foo@bar.baz', auth=Auth(user_one)) - osf_group.save() - parent_project.add_osf_group(osf_group, permissions.ADMIN) - url = '/{}nodes/{}/children/?inherit_contributors=true'.format( - API_BASE, parent_project._id) - component_data = { - 'data': { - 'type': 'nodes', - 'attributes': { - 'title': title, - 'category': category, - } - } - } - res = app.post_json_api(url, component_data, auth=user_one.auth) - assert res.status_code == 201 - json_data = res.json['data'] - - new_component_id = json_data['id'] - new_component = AbstractNode.load(new_component_id) - assert len(new_component.contributors) == 2 - assert len( - new_component.contributors - ) == len(parent_project.contributors) - expected_perms = {permissions.READ, permissions.ADMIN} - actual_perms = {contributor.permission for contributor in new_component.contributor_set.all()} - assert actual_perms == expected_perms - def test_create_component_inherit_contributors_with_blocked_email( self, app, user_one, title, category): parent_project = ProjectFactory(creator=user_one) @@ -4002,36 +3874,6 @@ def test_skip_uneditable_has_admin_permission_for_one_node( assert public_project_one.is_deleted is True assert public_project_three.is_deleted is False - def test_skip_uneditable_has_admin_permission_for_one_node_group_members( - self, app, public_project_one, public_project_three, url): - group_member = AuthUserFactory() - group = OSFGroupFactory(creator=group_member) - public_project_one.add_osf_group(group, permissions.ADMIN) - public_project_one.save() - public_project_three.add_osf_group(group, permissions.WRITE) - public_project_three.save() - payload = { - 'data': [ - { - 'id': public_project_one._id, - 'type': 'nodes', - }, - { - 'id': public_project_three._id, - 'type': 'nodes', - } - ] - } - - res = app.delete_json_api(url, payload, auth=group_member.auth, bulk=True) - assert res.status_code == 200 - assert res.json['errors'][0]['id'] == public_project_three._id - public_project_one.reload() - public_project_three.reload() - - assert public_project_one.is_deleted is True - assert public_project_three.is_deleted is False - def test_skip_uneditable_does_not_have_admin_permission_for_any_nodes( self, app, user_one, public_project_three, public_project_four, url): payload = { diff --git a/api_tests/nodes/views/test_node_logs.py b/api_tests/nodes/views/test_node_logs.py index 23c5056891f..220fadd1792 100644 --- a/api_tests/nodes/views/test_node_logs.py +++ 
b/api_tests/nodes/views/test_node_logs.py @@ -7,11 +7,9 @@ from osf_tests.factories import ( AuthUserFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, EmbargoFactory, ) -from osf.utils.permissions import READ from tests.base import assert_datetime_equal from api_tests.utils import disconnected_from_listeners from website.project.signals import contributor_removed @@ -77,13 +75,6 @@ def public_url(self, public_project): return '/{}nodes/{}/logs/?version=2.2'.format( API_BASE, public_project._id) - def test_can_view_osf_group_log(self, app, private_project, private_url): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth) - assert res.status_code == 200 - def test_add_tag(self, app, user, user_auth, public_project, public_url): public_project.add_tag('Rheisen', auth=user_auth) assert public_project.logs.latest().action == 'tag_added' diff --git a/api_tests/nodes/views/test_node_registrations_list.py b/api_tests/nodes/views/test_node_registrations_list.py index 45707f0b0d5..e8861b4ed6e 100644 --- a/api_tests/nodes/views/test_node_registrations_list.py +++ b/api_tests/nodes/views/test_node_registrations_list.py @@ -5,10 +5,8 @@ from osf_tests.factories import ( ProjectFactory, RegistrationFactory, - OSFGroupFactory, AuthUserFactory, ) -from osf.utils.permissions import READ def node_url_for(n_id): @@ -85,13 +83,6 @@ def test_node_registration_list( assert res.status_code == 401 assert 'detail' in res.json['errors'][0] - # test_return_private_registration_group_mem_read - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, expect_errors=True, auth=group_mem.auth) - assert res.status_code == 200 - # test_return_private_registrations_logged_in_contributor res = app.get(private_url, auth=user.auth) assert res.status_code == 200 diff --git a/api_tests/nodes/views/test_node_relationship_institutions.py b/api_tests/nodes/views/test_node_relationship_institutions.py index 47c607f3ed0..3bf25dc5adf 100644 --- a/api_tests/nodes/views/test_node_relationship_institutions.py +++ b/api_tests/nodes/views/test_node_relationship_institutions.py @@ -1,5 +1,4 @@ import pytest -from unittest import mock from api.base.settings.defaults import API_BASE from osf_tests.factories import ( @@ -8,7 +7,6 @@ NodeFactory, ) from osf.utils import permissions -from website import mails @pytest.mark.django_db @@ -115,6 +113,7 @@ def create_payload(self, institutions): ] } +@pytest.mark.usefixtures('mock_send_grid') class TestNodeRelationshipInstitutions(RelationshipInstitutionsTestMixin): def test_node_with_no_permissions(self, app, unauthorized_user_with_affiliation, institution_one, node_institutions_url): @@ -203,72 +202,59 @@ def test_user_with_institution_and_permissions( assert institution_one in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_user_with_institution_and_permissions_through_patch(self, app, user, institution_one, institution_two, - node, node_institutions_url): - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_one, institution_two]), - auth=user.auth - ) - assert res.status_code == 200 - assert mocked_send_mail.call_count == 2 - - first_call_args = 
mocked_send_mail.call_args_list[0] - assert first_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) - - second_call_args = mocked_send_mail.call_args_list[1] - assert second_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) - - @mock.patch('website.mails.settings.USE_EMAIL', True) - def test_remove_institutions_with_affiliated_user(self, app, user, institution_one, node, node_institutions_url): + node, node_institutions_url, mock_send_grid): + + mock_send_grid.reset_mock() + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_one, institution_two]), + auth=user.auth + ) + assert res.status_code == 200 + assert mock_send_grid.call_count == 2 + + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' + + second_call_args = mock_send_grid.call_args_list[1][1] + assert second_call_args['to_addr'] == user.email + assert second_call_args['subject'] == 'Project Affiliation Changed' + + def test_remove_institutions_with_affiliated_user(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.put_json_api( - node_institutions_url, - { - 'data': [] - }, - auth=user.auth - ) - - mocked_send_mail.assert_called_with( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + mock_send_grid.reset_mock() + res = app.put_json_api( + node_institutions_url, + { + 'data': [] + }, + auth=user.auth + ) + + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 200 assert node.affiliated_institutions.count() == 0 - @mock.patch('website.mails.settings.USE_EMAIL', True) - def test_using_post_making_no_changes_returns_201(self, app, user, institution_one, node, node_institutions_url): + def test_using_post_making_no_changes_returns_201(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.post_json_api( - node_institutions_url, - self.create_payload([institution_one]), - auth=user.auth - ) - mocked_send_mail.assert_not_called() + mock_send_grid.reset_mock() + res = app.post_json_api( + node_institutions_url, + self.create_payload([institution_one]), + auth=user.auth + ) + mock_send_grid.assert_not_called() assert res.status_code == 201 assert institution_one in node.affiliated_institutions.all() @@ -289,87 +275,70 @@ def test_put_not_admin_but_affiliated(self, app, institution_one, node, node_ins assert res.status_code == 200 assert institution_one in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_add_through_patch_one_inst_to_node_with_inst( - self, app, user, institution_one, institution_two, node, node_institutions_url): + self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) 
node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_one, institution_two]), - auth=user.auth - ) - assert mocked_send_mail.call_count == 1 - first_call_args = mocked_send_mail.call_args_list[0] - assert first_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + mock_send_grid.reset_mock() + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_one, institution_two]), + auth=user.auth + ) + assert mock_send_grid.call_count == 1 + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 200 assert institution_one in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_add_through_patch_one_inst_while_removing_other( - self, app, user, institution_one, institution_two, node, node_institutions_url): + self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_two]), - auth=user.auth - ) - assert mocked_send_mail.call_count == 2 - first_call_args = mocked_send_mail.call_args_list[0] - assert first_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) - second_call_args = mocked_send_mail.call_args_list[1] - assert second_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + mock_send_grid.reset_mock() + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_two]), + auth=user.auth + ) + assert mock_send_grid.call_count == 2 + + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' + + second_call_args = mock_send_grid.call_args_list[1][1] + assert second_call_args['to_addr'] == user.email + assert second_call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 200 assert institution_one not in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_add_one_inst_with_post_to_node_with_inst( - self, app, user, institution_one, institution_two, node, node_institutions_url): + self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.post_json_api( - node_institutions_url, - self.create_payload([institution_two]), - auth=user.auth - ) - 
mocked_send_mail.assert_called_with( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + res = app.post_json_api( + node_institutions_url, + self.create_payload([institution_two]), + auth=user.auth + ) + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == user.email + assert call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 201 assert institution_one in node.affiliated_institutions.all() @@ -383,23 +352,19 @@ def test_delete_nothing(self, app, user, node_institutions_url): ) assert res.status_code == 204 - @mock.patch('website.mails.settings.USE_EMAIL', True) - def test_delete_existing_inst(self, app, user, institution_one, node, node_institutions_url): + def test_delete_existing_inst(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: - res = app.delete_json_api( - node_institutions_url, - self.create_payload([institution_one]), - auth=user.auth - ) - mocked_send_mail.assert_called_with( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + res = app.delete_json_api( + node_institutions_url, + self.create_payload([institution_one]), + auth=user.auth + ) + + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == user.email + assert call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 204 assert institution_one not in node.affiliated_institutions.all() diff --git a/api_tests/nodes/views/test_node_reorder_components.py b/api_tests/nodes/views/test_node_reorder_components.py index a7019b17988..5bfa2ac374b 100644 --- a/api_tests/nodes/views/test_node_reorder_components.py +++ b/api_tests/nodes/views/test_node_reorder_components.py @@ -4,7 +4,6 @@ AuthUserFactory, ProjectFactory, NodeFactory, - OSFGroupFactory, ) from osf.models import NodeRelation from osf.utils import permissions @@ -22,15 +21,7 @@ def read_contrib(): return AuthUserFactory() @pytest.fixture() -def group_member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(group_member): - return OSFGroupFactory(creator=group_member) - -@pytest.fixture() -def project(admin_contrib, write_contrib, read_contrib, osf_group): +def project(admin_contrib, write_contrib, read_contrib): project = ProjectFactory(creator=admin_contrib) project.add_contributor(write_contrib, permissions.WRITE) project.add_contributor(read_contrib, permissions.READ) diff --git a/api_tests/nodes/views/test_node_settings.py b/api_tests/nodes/views/test_node_settings.py index f95d4c61ad6..4861e4d3035 100644 --- a/api_tests/nodes/views/test_node_settings.py +++ b/api_tests/nodes/views/test_node_settings.py @@ -5,7 +5,6 @@ AuthUserFactory, ProjectFactory, PrivateLinkFactory, - OSFGroupFactory, ) from osf.models import NodeLog from osf.utils import permissions @@ -22,14 +21,6 @@ def write_contrib(): def read_contrib(): return AuthUserFactory() -@pytest.fixture() -def group_member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(group_member): - return OSFGroupFactory(creator=group_member) - @pytest.fixture() def project(admin_contrib, write_contrib, read_contrib): project = ProjectFactory(creator=admin_contrib) @@ -50,7 +41,7 @@ class TestNodeSettingsGet: def non_contrib(self): return AuthUserFactory() - def test_node_settings_detail(self, app, admin_contrib, non_contrib, write_contrib, osf_group, group_member, url, 
project): + def test_node_settings_detail(self, app, admin_contrib, non_contrib, write_contrib, url, project): # non logged in uers can't access node settings res = app.get(url, expect_errors=True) @@ -68,11 +59,6 @@ def test_node_settings_detail(self, app, admin_contrib, non_contrib, write_contr res = app.get(url, auth=admin_contrib.auth) assert res.status_code == 200 - # group member can access node settings - project.add_osf_group(osf_group, permissions.READ) - res = app.get(url, auth=group_member.auth) - assert res.status_code == 200 - # allow_access_requests project.allow_access_requests = True project.save() @@ -146,7 +132,7 @@ def payload(self, project): } } - def test_put_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, url, osf_group, group_member): + def test_put_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, url): assert project.access_requests_enabled is True payload['data']['attributes']['access_requests_enabled'] = False # Logged out @@ -162,32 +148,14 @@ def test_put_permissions(self, app, project, payload, admin_contrib, write_contr res = app.put_json_api(url, payload, auth=read_contrib.auth, expect_errors=True) assert res.status_code == 403 - # group member read - project.add_osf_group(osf_group, permissions.READ) - project.save() - res = app.put_json_api(url, payload, auth=group_member.auth, expect_errors=True) - assert res.status_code == 403 - # Logged in write (Write contribs can only change some node settings) res = app.put_json_api(url, payload, auth=write_contrib.auth, expect_errors=True) assert res.status_code == 403 - # group member write - project.update_osf_group(osf_group, permissions.WRITE) - project.save() - res = app.put_json_api(url, payload, auth=group_member.auth, expect_errors=True) - assert res.status_code == 403 - # Logged in write (Write group mems can only change some node settings) res = app.put_json_api(url, payload, auth=admin_contrib.auth) assert res.status_code == 200 - # group member admin - project.update_osf_group(osf_group, permissions.ADMIN) - project.save() - res = app.put_json_api(url, payload, auth=group_member.auth, expect_errors=True) - assert res.status_code == 200 - @pytest.mark.django_db class TestNodeSettingsUpdate: @@ -203,7 +171,7 @@ def payload(self, project): } } - def test_patch_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, group_member, osf_group, url): + def test_patch_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, url): payload['data']['attributes']['redirect_link_enabled'] = True payload['data']['attributes']['redirect_link_url'] = 'https://cos.io' # Logged out @@ -227,21 +195,6 @@ def test_patch_permissions(self, app, project, payload, admin_contrib, write_con res = app.patch_json_api(url, payload, auth=admin_contrib.auth) assert res.status_code == 200 - # Logged in read group mem - project.add_osf_group(osf_group, permissions.READ) - res = app.patch_json_api(url, payload, auth=read_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # Logged in write group mem (Write group mems can only change some node settings) - project.add_osf_group(osf_group, permissions.WRITE) - res = app.patch_json_api(url, payload, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 200 - - # Logged in admin group mem - project.add_osf_group(osf_group, permissions.ADMIN) - res = app.patch_json_api(url, payload, auth=admin_contrib.auth) - assert res.status_code 
== 200 - def test_patch_invalid_type(self, app, project, payload, admin_contrib, url): payload['data']['type'] = 'Invalid Type' diff --git a/api_tests/nodes/views/test_node_wiki_list.py b/api_tests/nodes/views/test_node_wiki_list.py index 1972281f370..69ca6b2bb07 100644 --- a/api_tests/nodes/views/test_node_wiki_list.py +++ b/api_tests/nodes/views/test_node_wiki_list.py @@ -11,10 +11,8 @@ from osf_tests.factories import ( AuthUserFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, ) -from osf.utils.permissions import WRITE, READ from tests.base import fake @@ -124,15 +122,6 @@ def test_return_wikis( assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - # test_return_private_node_wikis_logged_in_osf_group_member - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth) - assert res.status_code == 200 - wiki_ids = [wiki['id'] for wiki in res.json['data']] - assert private_wiki._id in wiki_ids - # test_return_private_node_wikis_logged_in_non_contributor res = app.get(private_url, auth=non_contrib.auth, expect_errors=True) assert res.status_code == 403 @@ -347,13 +336,6 @@ def test_create_public_wiki_page_with_content(self, app, user_write_contributor, wiki_page = WikiPage.objects.get_for_node(project_public, page_name) assert wiki_page.get_version().content == 'my first wiki page' - # test_osf_group_member_write - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, WRITE) - res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=group_mem.auth, expect_errors=True) - assert res.status_code == 201 - def test_create_public_wiki_page_with_empty_content(self, app, user_write_contributor, url_node_public, project_public): page_name = fake.word() payload = create_wiki_payload(page_name) @@ -384,13 +366,6 @@ def test_do_not_create_public_wiki_page( res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=user_non_contributor.auth, expect_errors=True) assert res.status_code == 403 - # test_do_not_create_public_wiki_page_as_read_osf_group_member - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, READ) - res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - # test_do_not_create_public_wiki_page_as_unauthenticated res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), expect_errors=True) assert res.status_code == 401 diff --git a/api_tests/osf_groups/__init__.py b/api_tests/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/api_tests/osf_groups/views/__init__.py b/api_tests/osf_groups/views/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/api_tests/osf_groups/views/test_osf_group_detail.py b/api_tests/osf_groups/views/test_osf_group_detail.py deleted file mode 100644 index aa5b7c63b9c..00000000000 --- a/api_tests/osf_groups/views/test_osf_group_detail.py +++ /dev/null @@ -1,209 +0,0 @@ -import pytest - -from waffle.testutils import override_flag -from django.contrib.auth.models import Group - -from api.base.settings.defaults import API_BASE -from osf.models import OSFGroup -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from 
osf.features import OSF_GROUPS - - -def build_member_relationship_payload(user_ids): - return { - 'data': [{ - 'type': 'users', - 'id': user_id - } for user_id in user_ids] - } - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def old_name(): - return 'Platform Team' - -@pytest.fixture() -def new_name(): - return 'My New Lab' - -@pytest.fixture() -def osf_group(manager, member, old_name): - group = OSFGroupFactory(name=old_name, creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def url(osf_group): - return f'/{API_BASE}groups/{osf_group._id}/' - -@pytest.fixture() -def managers_url(url): - return url + 'managers/' - -@pytest.fixture() -def members_url(url): - return url + 'members/' - -@pytest.fixture() -def name_payload(osf_group, new_name): - return { - 'data': { - 'id': osf_group._id, - 'type': 'groups', - 'attributes': { - 'name': new_name - } - } - } - - -@pytest.mark.django_db -class TestGroupDetail: - - def test_return(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.get(url) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test authenticated user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test authenticated member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test authenticated manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test invalid group - url = '/{}groups/{}/'.format(API_BASE, '12345_bad_id') - res = app.get(url, expect_errors=True) - assert res.status_code == 404 - - -@pytest.mark.django_db -class TestOSFGroupUpdate: - def test_patch_osf_group_perms(self, app, member, manager, user, osf_group, url, name_payload, new_name): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.patch_json_api(url, expect_errors=True) - assert res.status_code == 401 - - # test authenticated_user - res = app.patch_json_api(url, {}, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - # test authenticated_member - res = app.patch_json_api(url, {}, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test authenticated_manager - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 200 - assert res.json['data']['attributes']['name'] == new_name - - def test_patch_osf_group_attributes(self, app, manager, osf_group, url, name_payload, old_name, new_name): - with override_flag(OSF_GROUPS, active=True): - # test_blank_name - assert osf_group.name 
== old_name - name_payload['data']['attributes']['name'] = '' - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'This field may not be blank.' - osf_group.reload - assert osf_group.name == old_name - - # test_name_updated - name_payload['data']['attributes']['name'] = new_name - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 200 - assert res.json['data']['attributes']['name'] == new_name - osf_group.reload() - assert osf_group.name == new_name - - # test_invalid_type - name_payload['data']['type'] = 'bad_type' - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test_id_mismatch - name_payload['data']['type'] = 'groups' - name_payload['data']['id'] = '12345_bad_id' - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - -@pytest.mark.django_db -class TestOSFGroupDelete: - def test_delete_perms(self, app, osf_group, manager, member, user, url): - with override_flag(OSF_GROUPS, active=True): - res = app.delete_json_api(url, expect_errors=True) - assert res.status_code == 401 - - res = app.delete_json_api(url, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - res = app.delete_json_api(url, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - res = app.delete_json_api(url, auth=manager.auth) - assert res.status_code == 204 - - def test_delete_specifics(self, app, osf_group, manager, member, user, url): - with override_flag(OSF_GROUPS, active=True): - osf_group_name = osf_group.name - manager_group_name = osf_group.manager_group.name - member_group_name = osf_group.member_group.name - - assert manager_group_name in manager.groups.values_list('name', flat=True) - assert member_group_name in member.groups.values_list('name', flat=True) - - res = app.delete_json_api(url, auth=manager.auth) - assert res.status_code == 204 - - assert not OSFGroup.objects.filter(name=osf_group_name).exists() - assert not Group.objects.filter(name=manager_group_name).exists() - assert not Group.objects.filter(name=member_group_name).exists() - - assert manager_group_name not in manager.groups.values_list('name', flat=True) - assert member_group_name not in member.groups.values_list('name', flat=True) - - res = app.get(url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 diff --git a/api_tests/osf_groups/views/test_osf_group_members_detail.py b/api_tests/osf_groups/views/test_osf_group_members_detail.py deleted file mode 100644 index 4b643058063..00000000000 --- a/api_tests/osf_groups/views/test_osf_group_members_detail.py +++ /dev/null @@ -1,259 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from framework.auth.core import Auth -from api.base.settings.defaults import API_BASE -from osf.utils.permissions import MEMBER, MANAGER -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def old_name(): - return 'Platform Team' - -@pytest.fixture() -def osf_group(manager, member, old_name): - group = OSFGroupFactory(name=old_name, creator=manager) - 
group.make_member(member) - return group - -@pytest.fixture() -def url(osf_group, member): - return f'/{API_BASE}groups/{osf_group._id}/members/{member._id}/' - -@pytest.fixture() -def bad_url(osf_group): - return '/{}groups/{}/members/{}/'.format(API_BASE, osf_group._id, '12345') - -@pytest.mark.django_db -class TestOSFGroupMembersDetail: - def test_return_perms(self, app, member, manager, user, osf_group, url, bad_url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.get(url) - assert res.status_code == 200 - - # test user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - - # test member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - - # test manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - - # test invalid member - res = app.get(bad_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - def test_return_member(self, app, member, manager, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - res = app.get(url) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == f'{osf_group._id}-{member._id}' - assert data['type'] == 'group-members' - assert data['attributes']['role'] == MEMBER - assert data['attributes']['unregistered_member'] is None - assert data['attributes']['full_name'] == member.fullname - assert member._id in data['relationships']['users']['links']['related']['href'] - - user = osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - res = app.get(f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/') - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == f'{osf_group._id}-{user._id}' - assert data['type'] == 'group-members' - assert data['attributes']['role'] == MANAGER - assert data['attributes']['unregistered_member'] == 'Crazy 8s' - assert data['attributes']['full_name'] == 'Crazy 8s' - assert res.json['data']['attributes']['full_name'] == 'Crazy 8s' - - -def build_update_payload(group_id, user_id, role): - return { - 'data': { - 'id': f'{group_id}-{user_id}', - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - } - -@pytest.mark.django_db -class TestOSFGroupMembersUpdate: - def test_update_role(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = build_update_payload(osf_group._id, member._id, MANAGER) - - # test unauthenticated - res = app.patch_json_api(url, payload, expect_errors=True) - assert res.status_code == 401 - - # test user - res = app.patch_json_api(url, payload, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - # test member - res = app.patch_json_api(url, payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager - res = app.patch_json_api(url, payload, auth=manager.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['role'] == MANAGER - assert res.json['data']['attributes']['full_name'] == member.fullname - assert res.json['data']['id'] == f'{osf_group._id}-{member._id}' - - payload = build_update_payload(osf_group._id, member._id, MEMBER) - res = app.patch_json_api(url, payload, auth=manager.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['role'] == MEMBER - assert res.json['data']['attributes']['full_name'] == member.fullname - assert res.json['data']['id'] == f'{osf_group._id}-{member._id}' - - def test_update_errors(self, app, 
member, manager, user, osf_group, url, bad_url): - with override_flag(OSF_GROUPS, active=True): - # id not in payload - payload = { - 'data': { - 'type': 'group-members', - 'attributes': { - 'role': MEMBER - } - } - } - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'This field may not be null.' - - # test improperly formatted id - payload = build_update_payload(osf_group._id, member._id, MANAGER) - payload['data']['id'] = 'abcde' - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test improper type - payload = build_update_payload(osf_group._id, member._id, MANAGER) - payload['data']['type'] = 'bad_type' - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test invalid role - payload = build_update_payload(osf_group._id, member._id, 'bad_perm') - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a valid role; choose manager or member.' - - # test user is not a member - payload = build_update_payload(osf_group._id, user._id, MEMBER) - bad_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.patch_json_api(bad_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{user._id} cannot be found in this OSFGroup' - - # test cannot downgrade remaining manager - payload = build_update_payload(osf_group._id, manager._id, MEMBER) - manager_url = f'/{API_BASE}groups/{osf_group._id}/members/{manager._id}/' - res = app.patch_json_api(manager_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test cannot remove last confirmed manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.patch_json_api(manager_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' 
- - -@pytest.mark.django_db -class TestOSFGroupMembersDelete: - def test_delete_perms(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.delete_json_api(url, expect_errors=True) - assert res.status_code == 401 - - # test user - res = app.delete_json_api(url, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - # test member - osf_group.make_member(user) - user_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.delete_json_api(user_url, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager - assert osf_group.is_member(member) is True - assert osf_group.is_manager(member) is False - - res = app.delete_json_api(url, auth=manager.auth) - assert res.status_code == 204 - assert osf_group.is_member(member) is False - assert osf_group.is_manager(member) is False - - # test delete manager (not last manager) - osf_group.make_manager(user) - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user) is True - user_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.delete_json_api(user_url, auth=user.auth) - assert res.status_code == 204 - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - def test_delete_yourself(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - assert osf_group.is_member(member) is True - assert osf_group.is_manager(member) is False - res = app.delete_json_api(url, auth=member.auth, expect_errors=True) - assert res.status_code == 204 - assert osf_group.is_member(member) is False - assert osf_group.is_manager(member) is False - - def test_delete_errors(self, app, member, manager, user, osf_group, url, bad_url): - with override_flag(OSF_GROUPS, active=True): - # test invalid user - res = app.delete_json_api(bad_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - # test user does not belong to group - bad_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.delete_json_api(bad_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{user._id} cannot be found in this OSFGroup' - - # test user is last manager - manager_url = f'/{API_BASE}groups/{osf_group._id}/members/{manager._id}/' - res = app.delete_json_api(manager_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test user is last registered manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.delete_json_api(manager_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' 
diff --git a/api_tests/osf_groups/views/test_osf_group_members_list.py b/api_tests/osf_groups/views/test_osf_group_members_list.py deleted file mode 100644 index 6018016fb0c..00000000000 --- a/api_tests/osf_groups/views/test_osf_group_members_list.py +++ /dev/null @@ -1,626 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from django.utils import timezone - -from framework.auth.core import Auth -from api.base.settings.defaults import API_BASE -from osf.models import OSFUser -from osf.utils.permissions import MEMBER, MANAGE, MANAGER -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def old_name(): - return 'Platform Team' - -@pytest.fixture() -def user3(osf_group): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(manager, member, old_name): - group = OSFGroupFactory(name=old_name, creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def url(osf_group): - return f'/{API_BASE}groups/{osf_group._id}/members/' - - -@pytest.mark.django_db -class TestGroupMembersList: - def test_return_perms(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.get(url) - assert res.status_code == 200 - - # test user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - - # test member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - - # test manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - - # test invalid group - url = '/{}groups/{}/members/'.format(API_BASE, '12345_bad_id') - res = app.get(url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - def test_return_members(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - res = app.get(url) - data = res.json['data'] - assert len(data) == 2 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{manager._id}' in member_ids - assert f'{osf_group._id}-{member._id}' in member_ids - - -@pytest.mark.django_db -class TestOSFGroupMembersFilter: - def test_filtering(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test filter members - url_filter = url + '?filter[role]=member' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{member._id}' in member_ids - - # test filter managers - url_filter = url + '?filter[role]=manager' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{manager._id}' in member_ids - - # test invalid role - url_filter = url + '?filter[role]=bad_role' - res = app.get(url_filter, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == "Value \'bad_role\' is not valid." 
- - # test filter fullname - url_filter = url + f'?filter[full_name]={manager.fullname}' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{manager._id}' in member_ids - - # test filter fullname - url_filter = url + f'?filter[full_name]={member.fullname}' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{member._id}' in member_ids - - # test invalid filter - url_filter = url + '?filter[created]=2018-02-01' - res = app.get(url_filter, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == "\'created\' is not a valid field for this endpoint." - -def make_create_payload(role, user=None, full_name=None, email=None): - base_payload = { - 'data': { - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - } - if user: - base_payload['data']['relationships'] = { - 'users': { - 'data': { - 'id': user._id, - 'type': 'users' - } - } - } - else: - if full_name: - base_payload['data']['attributes']['full_name'] = full_name - if email: - base_payload['data']['attributes']['email'] = email - - return base_payload - -@pytest.mark.django_db -class TestOSFGroupMembersCreate: - def test_create_manager(self, app, manager, user3, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = make_create_payload(MANAGER, user3) - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - data = res.json['data'] - assert data['attributes']['role'] == MANAGER - assert data['attributes']['full_name'] == user3.fullname - assert data['attributes']['unregistered_member'] is None - assert data['id'] == f'{osf_group._id}-{user3._id}' - assert user3._id in data['relationships']['users']['links']['related']['href'] - assert osf_group.has_permission(user3, MANAGE) is True - - def test_create_member(self, app, member, manager, user3, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = make_create_payload(MEMBER, user3) - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - data = res.json['data'] - assert data['attributes']['role'] == MEMBER - assert data['attributes']['full_name'] == user3.fullname - assert data['attributes']['unregistered_member'] is None - assert data['id'] == f'{osf_group._id}-{user3._id}' - assert data['id'] == f'{osf_group._id}-{user3._id}' - assert user3._id in data['relationships']['users']['links']['related']['href'] - assert osf_group.has_permission(user3, MANAGE) is False - assert osf_group.has_permission(user3, MEMBER) is True - - def test_add_unregistered_member(self, app, manager, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - full_name = 'Crazy 8s' - payload = make_create_payload(MEMBER, user=None, full_name=full_name, email='eight@cos.io') - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - data = res.json['data'] - assert data['attributes']['role'] == MEMBER - user = OSFUser.load(data['id'].split('-')[1]) - assert user._id in data['relationships']['users']['links']['related']['href'] - assert osf_group.has_permission(user, MANAGE) is False - assert data['attributes']['full_name'] == full_name - assert data['attributes']['unregistered_member'] == full_name - assert osf_group.has_permission(user, MEMBER) is True - assert user in osf_group.members_only - assert user not in 
osf_group.managers - - # test unregistered user is already a member - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'User already exists.' - - # test unregistered user email is blocked - payload['data']['attributes']['email'] = 'eight@example.com' - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Email address domain is blocked.' - - def test_create_member_perms(self, app, manager, member, osf_group, user3, url): - with override_flag(OSF_GROUPS, active=True): - payload = make_create_payload(MEMBER, user3) - # Unauthenticated - res = app.post_json_api(url, payload, expect_errors=True) - assert res.status_code == 401 - - # Logged in, nonmember - res = app.post_json_api(url, payload, auth=user3.auth, expect_errors=True) - assert res.status_code == 403 - - # Logged in, nonmanager - res = app.post_json_api(url, payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - def test_create_members_errors(self, app, manager, member, user3, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # invalid user - bad_user_payload = make_create_payload(MEMBER, user=user3) - bad_user_payload['data']['relationships']['users']['data']['id'] = 'bad_user_id' - res = app.post_json_api(url, bad_user_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == 'User with id bad_user_id not found.' - - # invalid type - bad_type_payload = make_create_payload(MEMBER, user=user3) - bad_type_payload['data']['type'] = 'bad_type' - res = app.post_json_api(url, bad_type_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # invalid role - bad_perm_payload = make_create_payload('bad_role', user=user3) - res = app.post_json_api(url, bad_perm_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_role is not a valid role; choose manager or member.' - - # fullname not included - unregistered_payload = make_create_payload(MEMBER, user=None, full_name=None, email='eight@cos.io') - res = app.post_json_api(url, unregistered_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - - # email not included - unregistered_payload = make_create_payload(MEMBER, user=None, full_name='Crazy 8s', email=None) - res = app.post_json_api(url, unregistered_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - - # user is already a member - existing_member_payload = make_create_payload(MEMBER, user=member) - res = app.post_json_api(url, existing_member_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'User is already a member of this group.' - - # Disabled user - user3.date_disabled = timezone.now() - user3.save() - payload = make_create_payload(MEMBER, user=user3) - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Deactivated users cannot be added to OSF Groups.' 
- - # No role specified - given member by default - user3.date_disabled = None - user3.save() - payload = make_create_payload(MEMBER, user=user3) - payload['attributes'] = {} - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - assert res.json['data']['attributes']['role'] == MEMBER - assert osf_group.has_permission(user3, 'member') - assert not osf_group.has_permission(user3, 'manager') - -def make_bulk_create_payload(role, user=None, full_name=None, email=None): - base_payload = { - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - - if user: - base_payload['relationships'] = { - 'users': { - 'data': { - 'id': user._id, - 'type': 'users' - } - } - } - else: - if full_name: - base_payload['attributes']['full_name'] = full_name - if email: - base_payload['attributes']['email'] = email - - return base_payload - -@pytest.mark.django_db -class TestOSFGroupMembersBulkCreate: - def test_bulk_create_group_member_perms(self, app, url, manager, member, user, user3, osf_group): - with override_flag(OSF_GROUPS, active=True): - payload_user_three = make_bulk_create_payload(MANAGER, user3) - payload_user = make_bulk_create_payload(MEMBER, user) - bulk_payload = [payload_user_three, payload_user] - - # unauthenticated - res = app.post_json_api(url, {'data': bulk_payload}, expect_errors=True, bulk=True) - assert res.status_code == 401 - - # non member - res = app.post_json_api(url, {'data': bulk_payload}, auth=user.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # member - res = app.post_json_api(url, {'data': bulk_payload}, auth=member.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # manager - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, bulk=True) - assert res.status_code == 201 - assert len(res.json['data']) == 2 - - assert osf_group.is_member(user) is True - assert osf_group.is_member(user3) is True - assert osf_group.is_manager(user) is False - assert osf_group.is_manager(user3) is True - - def test_bulk_create_unregistered(self, app, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload_user = make_bulk_create_payload(MEMBER, user) - payload_unregistered = make_bulk_create_payload(MEMBER, user=None, full_name='Crazy 8s', email='eight@cos.io') - res = app.post_json_api(url, {'data': [payload_user, payload_unregistered]}, auth=manager.auth, bulk=True) - unreg_user = OSFUser.objects.get(username='eight@cos.io') - assert res.status_code == 201 - ids = [user_data['id'] for user_data in res.json['data']] - roles = [user_data['attributes']['role'] for user_data in res.json['data']] - assert f'{osf_group._id}-{user._id}' in ids - assert f'{osf_group._id}-{unreg_user._id}' in ids - assert roles[0] == MEMBER - assert roles[1] == MEMBER - unregistered_names = [user_data['attributes']['unregistered_member'] for user_data in res.json['data']] - assert {'Crazy 8s', None} == set(unregistered_names) - - assert osf_group.has_permission(user, MANAGE) is False - assert osf_group.has_permission(user, MEMBER) is True - assert osf_group.has_permission(unreg_user, MANAGE) is False - assert osf_group.has_permission(unreg_user, MEMBER) is True - assert osf_group.is_member(unreg_user) is True - assert osf_group.is_manager(unreg_user) is False - - def test_bulk_create_group_member_errors(self, app, url, manager, member, user, user3, osf_group): - with override_flag(OSF_GROUPS, active=True): - payload_member = make_bulk_create_payload(MANAGER, member) - 
payload_user = make_bulk_create_payload(MANAGER, user) - - # User in bulk payload is an invalid user - bad_user_payload = make_bulk_create_payload(MEMBER, user=user3) - bad_user_payload['relationships']['users']['data']['id'] = 'bad_user_id' - bulk_payload = [payload_user, bad_user_payload] - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == 'User with id bad_user_id not found.' - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # User in bulk payload is invalid - bad_type_payload = make_bulk_create_payload(MEMBER, user=user3) - bad_type_payload['type'] = 'bad_type' - bulk_payload = [payload_user, bad_type_payload] - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 409 - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # User in bulk payload has invalid role specified - bad_role_payload = make_bulk_create_payload('bad_role', user=user3) - res = app.post_json_api(url, {'data': [payload_user, bad_role_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_role is not a valid role; choose manager or member.' - assert osf_group.is_member(user3) is False - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user3) is False - assert osf_group.is_manager(user) is False - - # fullname not included - unregistered_payload = make_bulk_create_payload(MEMBER, user=None, full_name=None, email='eight@cos.io') - res = app.post_json_api(url, {'data': [payload_user, unregistered_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # email not included - unregistered_payload = make_bulk_create_payload(MEMBER, user=None, full_name='Crazy 8s', email=None) - res = app.post_json_api(url, {'data': [payload_user, unregistered_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # Member of bulk payload is already a member - bulk_payload = [payload_member, payload_user] - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'User is already a member of this group.' - assert osf_group.is_member(member) is True - assert osf_group.is_member(user) is False - assert osf_group.is_manager(member) is False - assert osf_group.is_manager(user) is False - - # Disabled user - user3.date_disabled = timezone.now() - user3.save() - payload = make_bulk_create_payload(MEMBER, user=user3) - res = app.post_json_api(url, {'data': [payload_user, payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Deactivated users cannot be added to OSF Groups.' 
- - # No role specified, given member by default - user3.date_disabled = None - user3.save() - payload = make_bulk_create_payload(MEMBER, user=user3) - payload['attributes'] = {} - res = app.post_json_api(url, {'data': [payload_user, payload]}, auth=manager.auth, bulk=True) - assert res.status_code == 201 - assert len(res.json['data']) == 2 - ids = [user_data['id'] for user_data in res.json['data']] - assert f'{osf_group._id}-{user._id}' in ids - assert f'{osf_group._id}-{user3._id}' in ids - assert osf_group.is_member(user3) is True - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user3) is False - assert osf_group.is_manager(user) is True - -def build_bulk_update_payload(group_id, user_id, role): - return { - 'id': f'{group_id}-{user_id}', - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - - -@pytest.mark.django_db -class TestOSFGroupMembersBulkUpdate: - def test_update_role(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = build_bulk_update_payload(osf_group._id, member._id, MANAGER) - bulk_payload = {'data': [payload]} - - # test unauthenticated - res = app.patch_json_api(url, bulk_payload, expect_errors=True, bulk=True) - assert res.status_code == 401 - - # test user - res = app.patch_json_api(url, bulk_payload, auth=user.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test member - res = app.patch_json_api(url, bulk_payload, auth=member.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test manager - res = app.patch_json_api(url, bulk_payload, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 200 - assert res.json['data'][0]['attributes']['role'] == MANAGER - assert res.json['data'][0]['attributes']['full_name'] == member.fullname - assert res.json['data'][0]['id'] == f'{osf_group._id}-{member._id}' - - payload = build_bulk_update_payload(osf_group._id, member._id, MEMBER) - bulk_payload = {'data': [payload]} - res = app.patch_json_api(url, bulk_payload, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 200 - assert res.json['data'][0]['attributes']['role'] == MEMBER - assert res.json['data'][0]['attributes']['full_name'] == member.fullname - assert res.json['data'][0]['id'] == f'{osf_group._id}-{member._id}' - - def test_bulk_update_errors(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # id not in payload - payload = { - 'type': 'group-members', - 'attributes': { - 'role': MEMBER - } - } - bulk_payload = {'data': [payload]} - - res = app.patch_json_api(url, bulk_payload, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Member identifier not provided.' - - # test improperly formatted id - payload = build_bulk_update_payload(osf_group._id, member._id, MANAGER) - payload['id'] = 'abcde' - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Member identifier incorrectly formatted.' 
- - # test improper type - payload = build_bulk_update_payload(osf_group._id, member._id, MANAGER) - payload['type'] = 'bad_type' - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 409 - - # test invalid role - payload = build_bulk_update_payload(osf_group._id, member._id, 'bad_perm') - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a valid role; choose manager or member.' - - # test user is not a member - payload = build_bulk_update_payload(osf_group._id, user._id, MEMBER) - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.' - - # test cannot downgrade remaining manager - payload = build_bulk_update_payload(osf_group._id, manager._id, MEMBER) - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test cannot remove last confirmed manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - -def create_bulk_delete_payload(group_id, user_id): - return { - 'id': f'{group_id}-{user_id}', - 'type': 'group-members' - } - -@pytest.mark.django_db -class TestOSFGroupMembersBulkDelete: - def test_delete_perms(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - member_payload = create_bulk_delete_payload(osf_group._id, member._id) - bulk_payload = {'data': [member_payload]} - # test unauthenticated - res = app.delete_json_api(url, bulk_payload, expect_errors=True, bulk=True) - assert res.status_code == 401 - - # test user - res = app.delete_json_api(url, bulk_payload, auth=user.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test member - res = app.delete_json_api(url, bulk_payload, auth=member.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test manager - assert osf_group.is_member(member) is True - assert osf_group.is_manager(member) is False - - res = app.delete_json_api(url, bulk_payload, auth=manager.auth, bulk=True) - assert res.status_code == 204 - assert osf_group.is_member(member) is False - assert osf_group.is_manager(member) is False - - # test user does not belong to OSF Group - osf_group.make_manager(user) - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user) is True - user_payload = create_bulk_delete_payload(osf_group._id, user._id) - bulk_payload = {'data': [user_payload, member_payload]} - res = app.delete_json_api(url, bulk_payload, auth=user.auth, bulk=True, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{member._id} cannot be found in this OSFGroup' - - # test bulk delete manager (not last one) - osf_group.make_manager(user) - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user) is True - user_payload = 
create_bulk_delete_payload(osf_group._id, user._id)
-            bulk_payload = {'data': [user_payload]}
-            res = app.delete_json_api(url, bulk_payload, auth=user.auth, bulk=True)
-            assert res.status_code == 204
-            assert osf_group.is_member(user) is False
-            assert osf_group.is_manager(user) is False
-
-    def test_delete_errors(self, app, member, manager, user, osf_group, url):
-        with override_flag(OSF_GROUPS, active=True):
-            # test invalid user
-            invalid_payload = create_bulk_delete_payload(osf_group._id, '12345')
-            res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True)
-            assert res.status_code == 400
-            assert res.json['errors'][0]['detail'] == 'Could not find all objects to delete.'
-
-            # test user does not belong to group
-            invalid_payload = create_bulk_delete_payload(osf_group._id, user._id)
-            res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True)
-            assert res.status_code == 404
-            assert res.json['errors'][0]['detail'] == f'{user._id} cannot be found in this OSFGroup'
-
-            # test user is last manager
-            invalid_payload = create_bulk_delete_payload(osf_group._id, manager._id)
-            res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True)
-            assert res.status_code == 400
-            assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.'
-
-            # test user is last registered manager
-            osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER)
-            assert len(osf_group.managers) == 2
-            res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True)
-            assert res.status_code == 400
-            assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.'
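The deleted bulk-membership tests above all drive the same three JSON:API payload shapes. As a condensed reference, the sketch below restates those shapes outside the test suite; the helper names, the `group-members` type, and the composite `{group_id}-{user_id}` identifier mirror the deleted test utilities, while the ids in the usage block are made-up illustrations.

```python
# Condensed sketch of the payload shapes exercised by the deleted bulk
# group-membership tests. Helper names and field layout mirror the deleted
# test utilities; example ids below are illustrative only.

def make_bulk_create_payload(role, user_id=None, full_name=None, email=None):
    """Build one 'group-members' resource for a bulk POST."""
    payload = {'type': 'group-members', 'attributes': {'role': role}}
    if user_id:
        # Registered users are referenced through a 'users' relationship.
        payload['relationships'] = {
            'users': {'data': {'id': user_id, 'type': 'users'}}
        }
    else:
        # Unregistered members are described by full_name and email attributes.
        if full_name:
            payload['attributes']['full_name'] = full_name
        if email:
            payload['attributes']['email'] = email
    return payload


def build_bulk_update_payload(group_id, user_id, role):
    """Bulk PATCH bodies address an existing membership as '<group_id>-<user_id>'."""
    return {
        'id': f'{group_id}-{user_id}',
        'type': 'group-members',
        'attributes': {'role': role},
    }


def create_bulk_delete_payload(group_id, user_id):
    """Bulk DELETE bodies only need the composite id and the resource type."""
    return {'id': f'{group_id}-{user_id}', 'type': 'group-members'}


if __name__ == '__main__':
    # Every bulk request wraps its resources in a top-level 'data' list.
    body = {'data': [
        make_bulk_create_payload('member', user_id='abc12'),
        make_bulk_create_payload('member', full_name='Crazy 8s', email='eight@cos.io'),
    ]}
    print(body)
```

As the tests suggest, update and delete requests address memberships through the composite id alone, which apparently lets the endpoint validate formatting ('Member identifier incorrectly formatted.') before resolving the underlying user.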
diff --git a/api_tests/osf_groups/views/test_osf_groups_list.py b/api_tests/osf_groups/views/test_osf_groups_list.py deleted file mode 100644 index 89ef26ab96b..00000000000 --- a/api_tests/osf_groups/views/test_osf_groups_list.py +++ /dev/null @@ -1,151 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from api.base.settings.defaults import API_BASE -from osf.models import OSFGroup -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(manager, member): - group = OSFGroupFactory(name='Platform Team', creator=manager) - group.make_member(member) - return group - -@pytest.mark.django_db -class TestGroupList: - - @pytest.fixture() - def url(self): - return f'/{API_BASE}groups/' - - def test_return(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test nonauthenticated - res = app.get(url) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - assert data[0]['type'] == 'groups' - assert data[0]['attributes']['name'] == osf_group.name - - # test authenticated manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - assert data[0]['type'] == 'groups' - assert data[0]['attributes']['name'] == osf_group.name - - def test_groups_filter(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - second_group = OSFGroupFactory(name='Apples', creator=manager) - res = app.get(url + '?filter[name]=Platform', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - - res = app.get(url + '?filter[name]=Apple', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == second_group._id - - res = app.get(url + '?filter[bad_field]=Apple', auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - - res = app.get(url + '?filter[name]=Platform') - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - res = app.get(url + '?filter[name]=Apple') - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - -@pytest.mark.django_db -class TestOSFGroupCreate: - @pytest.fixture() - def url(self): - return f'/{API_BASE}groups/' - - @pytest.fixture() - def simple_payload(self): - return { - 'data': { - 'type': 'groups', - 'attributes': { - 'name': 'My New Lab' - }, - } - } - - def test_create_osf_group(self, app, url, manager, simple_payload): - # Nonauthenticated - with override_flag(OSF_GROUPS, active=True): - res = app.post_json_api(url, simple_payload, expect_errors=True) - assert res.status_code == 401 - - # Authenticated - res = app.post_json_api(url, simple_payload, auth=manager.auth) - assert res.status_code == 201 - assert 
res.json['data']['type'] == 'groups' - assert res.json['data']['attributes']['name'] == 'My New Lab' - group = OSFGroup.objects.get(_id=res.json['data']['id']) - assert group.creator_id == manager.id - assert group.has_permission(manager, 'manage') is True - assert group.has_permission(manager, 'member') is True - - def test_create_osf_group_validation_errors(self, app, url, manager, simple_payload): - # Need data key - with override_flag(OSF_GROUPS, active=True): - res = app.post_json_api(url, simple_payload['data'], auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Request must include /data.' - - # Incorrect type - simple_payload['data']['type'] = 'incorrect_type' - res = app.post_json_api(url, simple_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # Required name field - payload = { - 'data': { - 'type': 'groups' - } - } - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'This field is required.' diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index ce69697ff0c..6676b542b60 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -1345,15 +1345,15 @@ def test_add_contributor_validation(self, preprint_published, validate_data): @pytest.mark.django_db @pytest.mark.enable_enqueue_task +@pytest.mark.usefixtures('mock_send_grid') class TestPreprintContributorCreateEmail(NodeCRUDTestCase): @pytest.fixture() def url_preprint_contribs(self, preprint_published): return f'/{API_BASE}preprints/{preprint_published._id}/contributors/' - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_no_email_if_false( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=false' payload = { 'data': { @@ -1364,13 +1364,13 @@ def test_add_contributor_no_email_if_false( } } } + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_needs_preprint_filter_to_send_email( - self, mock_mail, app, user, user_two, + self, mock_send_grid, app, user, user_two, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=default' payload = { @@ -1389,10 +1389,11 @@ def test_add_contributor_needs_preprint_filter_to_send_email( } } + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' 
- assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @mock.patch('website.project.signals.contributor_added.send') def test_add_contributor_signal_if_preprint( @@ -1419,9 +1420,8 @@ def test_add_contributor_signal_if_preprint( assert mock_send.call_count == 1 assert 'preprint' == kwargs['email_template'] - @mock.patch('framework.auth.views.mails.send_mail') def test_add_unregistered_contributor_sends_email( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' payload = { 'data': { @@ -1432,9 +1432,11 @@ def test_add_unregistered_contributor_sends_email( } } } + + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @mock.patch('website.project.signals.unreg_contributor_added.send') def test_add_unregistered_contributor_signal_if_preprint( @@ -1455,9 +1457,8 @@ def test_add_unregistered_contributor_signal_if_preprint( assert 'preprint' == kwargs['email_template'] assert mock_send.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') def test_add_contributor_invalid_send_email_param( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=true' payload = { 'data': { @@ -1468,16 +1469,16 @@ def test_add_contributor_invalid_send_email_param( } } } + mock_send_grid.reset_mock() res = app.post_json_api( url, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'true is not a valid email preference.' - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') def test_add_unregistered_contributor_without_email_no_email( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' payload = { 'data': { @@ -1488,16 +1489,16 @@ def test_add_unregistered_contributor_without_email_no_email( } } + mock_send_grid.reset_mock() with capture_signals() as mock_signal: res = app.post_json_api(url, payload, auth=user.auth) assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated') def test_publishing_preprint_sends_emails_to_contributors( - self, mock_update, mock_mail, app, user, url_preprint_contribs, preprint_unpublished): + self, mock_update, mock_send_grid, app, user, url_preprint_contribs, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/' user_two = AuthUserFactory() preprint_unpublished.add_contributor(user_two, permissions=permissions.WRITE, save=True) @@ -1535,9 +1536,8 @@ def test_contributor_added_signal_not_specified( assert 'preprint' == kwargs['email_template'] assert mock_send.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') def test_contributor_added_not_sent_if_unpublished( - self, mock_mail, app, user, preprint_unpublished): + self, mock_send_grid, app, user, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/contributors/?send_email=preprint' payload = { 'data': { @@ -1548,9 
+1548,10 @@ def test_contributor_added_not_sent_if_unpublished( } } } + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.django_db diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index 107cd5ac054..20d081e8709 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -90,17 +89,16 @@ def test_GET_admin_with_filter(self, app, url, nonmoderator, moderator, admin, p @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestPOSTCollectionsModeratorList: - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_unauthorized(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_POST_unauthorized(self, mock_send_grid, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, expect_errors=True) assert res.status_code == 401 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_POST_forbidden(self, mock_send_grid, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) @@ -109,58 +107,53 @@ def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, prov res = app.post(url, payload, auth=moderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): + def test_POST_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): payload = make_payload(user_id=moderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): + def test_POST_admin_failure_unreg_moderator(self, mock_send_grid, app, url, 
moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Jalen Hurts', 'email': '1eagles@allbatman.org'} # test_user_with_no_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 # test_user_with_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - assert mock_mail.call_args[0][0] == unreg_user['email'] + assert mock_send_grid.call_count == 1 + assert mock_send_grid.call_args[1]['to_addr'] == unreg_user['email'] - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='citizen') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_POST_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Flecher Cox' diff --git a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py index fbcfd32a99b..8998d2a85ca 100644 --- a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py +++ b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -9,6 +8,7 @@ from osf.utils import permissions +@pytest.mark.usefixtures('mock_send_grid') class ProviderModeratorListTestClass: @pytest.fixture() @@ -68,8 +68,7 @@ def test_list_get_admin_with_filter(self, app, url, nonmoderator, moderator, adm assert res.json['data'][0]['id'] == admin._id assert res.json['data'][0]['attributes']['permission_group'] == permissions.ADMIN - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_unauthorized(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_list_post_unauthorized(self, mock_send_grid, app, url, nonmoderator, moderator, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, expect_errors=True) assert res.status_code == 401 @@ -80,58 +79,53 @@ def test_list_post_unauthorized(self, mock_mail, app, url, nonmoderator, moderat 
res = app.post(url, payload, auth=moderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): + def test_list_post_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): payload = self.create_payload(user_id=moderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): + def test_list_post_admin_failure_unreg_moderator(self, mock_send_grid, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Son Goku', 'email': 'goku@dragonball.org'} # test_user_with_no_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 # test_user_with_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - assert mock_mail.call_args[0][0] == unreg_user['email'] + assert mock_send_grid.call_count == 1 + assert mock_send_grid.call_args[1]['to_addr'] == unreg_user['email'] - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='citizen') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_list_post_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert len(res.json['data']['id']) == 5 
assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_list_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Alice Alisdottir' diff --git a/api_tests/providers/tasks/test_bulk_upload.py b/api_tests/providers/tasks/test_bulk_upload.py index 40003fb6931..221861ea313 100644 --- a/api_tests/providers/tasks/test_bulk_upload.py +++ b/api_tests/providers/tasks/test_bulk_upload.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest import uuid @@ -8,13 +7,10 @@ from osf.models import RegistrationBulkUploadJob, RegistrationBulkUploadRow, RegistrationProvider, RegistrationSchema from osf.models.registration_bulk_upload_job import JobState from osf.models.registration_bulk_upload_row import RegistrationBulkUploadContributors -from osf.registrations.utils import get_registration_provider_submissions_url from osf.utils.permissions import ADMIN, READ, WRITE from osf_tests.factories import InstitutionFactory, SubjectFactory, UserFactory -from website import mails, settings - class TestRegistrationBulkUploadContributors: @@ -67,6 +63,7 @@ def test_error_message_default(self): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestBulkUploadTasks: @pytest.fixture() @@ -320,9 +317,7 @@ def test_bulk_creation_dry_run(self, registration_row_1, registration_row_2, upl assert upload_job_done_full.state == JobState.PICKED_UP assert not upload_job_done_full.email_sent - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) - def test_bulk_creation_done_full(self, mock_send_mail, registration_row_1, registration_row_2, + def test_bulk_creation_done_full(self, mock_send_grid, registration_row_1, registration_row_2, upload_job_done_full, provider, initiator, read_contributor, write_contributor): bulk_create_registrations(upload_job_done_full.id, dry_run=False) @@ -340,18 +335,9 @@ def test_bulk_creation_done_full(self, mock_send_mail, registration_row_1, regis assert row.draft_registration.contributor_set.get(user=write_contributor).permission == WRITE assert row.draft_registration.contributor_set.get(user=read_contributor).permission == READ - mock_send_mail.assert_called_with( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_ALL, - fullname=initiator.fullname, - auto_approval=False, - count=2, - pending_submissions_url=get_registration_provider_submissions_url(provider), - ) - - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) - def test_bulk_creation_done_partial(self, mock_send_mail, registration_row_3, + mock_send_grid.assert_called() + + def test_bulk_creation_done_partial(self, mock_send_grid, registration_row_3, registration_row_invalid_extra_bib_1, upload_job_done_partial, provider, initiator, read_contributor, write_contributor): @@ -369,26 +355,9 @@ def test_bulk_creation_done_partial(self, mock_send_mail, registration_row_3, assert registration_row_3.draft_registration.contributor_set.get(user=write_contributor).permission == WRITE assert registration_row_3.draft_registration.contributor_set.get(user=read_contributor).permission == READ - mock_send_mail.assert_called_with( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL, - 
fullname=initiator.fullname, - auto_approval=False, - approval_errors=[], - draft_errors=[ - 'Title: Test title Invalid - Extra Bibliographic Contributor, External ID: 90-=ijkl, ' - 'Error: Bibliographic contributors must be one of admin, read-only or read-write' - ], - total=2, - successes=1, - failures=1, - pending_submissions_url=get_registration_provider_submissions_url(provider), - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) - - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) - def test_bulk_creation_done_error(self, mock_send_mail, registration_row_invalid_extra_bib_2, + mock_send_grid.assert_called() + + def test_bulk_creation_done_error(self, mock_send_grid, registration_row_invalid_extra_bib_2, registration_row_invalid_affiliation, upload_job_done_error, provider, initiator, read_contributor, write_contributor, institution): @@ -398,16 +367,4 @@ def test_bulk_creation_done_error(self, mock_send_mail, registration_row_invalid assert upload_job_done_error.email_sent assert len(RegistrationBulkUploadRow.objects.filter(upload__id=upload_job_done_error.id)) == 0 - mock_send_mail.assert_called_with( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_FAILURE_ALL, - fullname=initiator.fullname, - draft_errors=[ - 'Title: Test title Invalid - Extra Bibliographic Contributor, External ID: 90-=ijkl, ' - 'Error: Bibliographic contributors must be one of admin, read-only or read-write', - f'Title: Test title Invalid - Unauthorized Affiliation, External ID: mnopqrst, ' - f'Error: Initiator [{initiator._id}] is not affiliated with institution [{institution._id}]', - ], - count=2, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) + mock_send_grid.assert_called() diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 3d3f6528abb..9112d0a3264 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -26,7 +26,6 @@ AuthUserFactory, UnregUserFactory, WithdrawnRegistrationFactory, - OSFGroupFactory, CommentFactory, InstitutionFactory, ) @@ -399,27 +398,6 @@ def test_update_registration( assert res.status_code == 403 assert res.json['errors'][0]['detail'] == 'You do not have permission to perform this action.' 
- # test_osf_group_member_write_cannot_update_registration - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - public_project.add_osf_group(group, permissions.WRITE) - res = app.put_json_api( - public_url, - public_to_private_payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 - - # test_osf_group_member_admin_cannot_update_registration - public_project.remove_osf_group(group) - public_project.add_osf_group(group, permissions.ADMIN) - res = app.put_json_api( - public_url, - public_to_private_payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 - @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') def test_fields( self, app, user, public_registration, @@ -717,6 +695,7 @@ def test_read_write_contributor_can_edit_writeable_fields( @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestRegistrationWithdrawal(TestRegistrationUpdateTestCase): @pytest.fixture @@ -775,15 +754,14 @@ def test_initiate_withdraw_registration_fails( res = app.put_json_api(public_url, public_payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 - @mock.patch('website.mails.send_mail') - def test_initiate_withdrawal_success(self, mock_send_mail, app, user, public_registration, public_url, public_payload): + def test_initiate_withdrawal_success(self, mock_send_grid, app, user, public_registration, public_url, public_payload): res = app.put_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['attributes']['pending_withdrawal'] is True public_registration.refresh_from_db() assert public_registration.is_pending_retraction assert public_registration.registered_from.logs.first().action == 'retraction_initiated' - assert mock_send_mail.called + assert mock_send_grid.called @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') def test_initiate_withdrawal_with_embargo_ends_embargo( @@ -807,9 +785,8 @@ def test_initiate_withdrawal_with_embargo_ends_embargo( assert public_registration.is_pending_retraction assert not public_registration.is_pending_embargo - @mock.patch('website.mails.send_mail') def test_withdraw_request_does_not_send_email_to_unregistered_admins( - self, mock_send_mail, app, user, public_registration, public_url, public_payload): + self, mock_send_grid, app, user, public_registration, public_url, public_payload): unreg = UnregUserFactory() with disconnected_from_listeners(contributor_added): public_registration.add_unregistered_contributor( @@ -826,7 +803,7 @@ def test_withdraw_request_does_not_send_email_to_unregistered_admins( # Only the creator gets an email; the unreg user does not get emailed assert public_registration._contributors.count() == 2 - assert mock_send_mail.call_count == 1 + assert mock_send_grid.call_count == 3 @pytest.mark.django_db diff --git a/api_tests/registrations/views/test_registration_list.py b/api_tests/registrations/views/test_registration_list.py index 0a43be0a2b7..e73a6e81cc0 100644 --- a/api_tests/registrations/views/test_registration_list.py +++ b/api_tests/registrations/views/test_registration_list.py @@ -23,7 +23,6 @@ AuthUserFactory, CollectionFactory, DraftRegistrationFactory, - OSFGroupFactory, NodeLicenseRecordFactory, TagFactory, SubjectFactory, @@ -841,13 +840,6 @@ def test_cannot_create_registration( res = app.post_json_api(url_registrations, payload, expect_errors=True) assert res.status_code == 401 - # admin via a group cannot create registration - group_mem = 
AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, permissions.ADMIN) - res = app.post_json_api(url_registrations, payload, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_registration_draft_must_be_specified( self, mock_enqueue, app, user, payload, url_registrations): @@ -1602,7 +1594,6 @@ def test_create_registration_with_manual_guid(self, app, user, schema, url_regis def test_need_admin_perms_on_draft( self, mock_enqueue, app, user, schema, payload_ver, url_registrations_ver): user_two = AuthUserFactory() - group = OSFGroupFactory(creator=user) # User is an admin contributor on draft registration but not on node draft_registration = DraftRegistrationFactory(creator=user_two, registration_schema=schema) @@ -1628,17 +1619,6 @@ def test_need_admin_perms_on_draft( res = app.post_json_api(url_registrations_ver, payload_ver, auth=user.auth) assert res.status_code == 201 - # User is an admin group contributor on the node but not on draft registration - draft_registration = DraftRegistrationFactory(creator=user_two, registration_schema=schema) - draft_registration.branched_from.add_osf_group(group, permissions.ADMIN) - payload_ver['data']['attributes']['draft_registration_id'] = draft_registration._id - assert draft_registration.branched_from.is_admin_contributor(user) is False - assert draft_registration.branched_from.has_permission(user, permissions.ADMIN) is True - assert draft_registration.has_permission(user, permissions.ADMIN) is False - res = app.post_json_api(url_registrations_ver, payload_ver, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - assert res.json['errors'][0]['detail'] == 'You must be an admin contributor on the draft registration to create a registration.' - # User is an admin contributor on node but not on draft registration draft_registration = DraftRegistrationFactory(creator=user_two, registration_schema=schema) draft_registration.add_contributor(user, permissions.WRITE) diff --git a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index ca2a2c477e4..d868739e9bd 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -6,12 +5,11 @@ from osf_tests.factories import NodeFactory, InstitutionFactory, AuthUserFactory from osf.utils.workflows import DefaultStates, NodeRequestTypes -from website import language -from website.mails import NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST from framework.auth import Auth @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestNodeRequestListInstitutionalAccess(NodeRequestTestMixin): @pytest.fixture() @@ -208,85 +206,37 @@ def test_institutional_admin_unauth_institution(self, app, project, institution_ assert res.status_code == 403 assert 'Institutional request access is not enabled.' 
in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.send_mail') - @mock.patch('osf.utils.machines.mails.send_mail') - def test_email_send_institutional_request_specific_email( - self, - mock_send_mail_machines, - mock_send_mail_serializers, - user_with_affiliation, - app, - project, - url, - create_payload, - institutional_admin, - institution - ): - """ - Test that the institutional request triggers email notifications to appropriate recipients. - """ - # Set up mock behaviors - project.is_public = True - project.save() - - # Perform the action - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) - - # Ensure response is successful - assert res.status_code == 201 - - assert mock_send_mail_serializers.call_count == 1 - assert mock_send_mail_machines.call_count == 0 - - # Check calls for osf.utils.machines.mails.send_mail - mock_send_mail_serializers.assert_called_once_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) - - @mock.patch('api.requests.serializers.send_mail') - def test_email_not_sent_without_recipient(self, mock_mail, app, project, institutional_admin, url, + def test_email_not_sent_without_recipient(self, mock_send_grid, app, project, institutional_admin, url, create_payload, institution): """ Test that an email is not sent when no recipient is listed when an institutional access request is made, but the request is still made anyway without email. """ del create_payload['data']['relationships']['message_recipient'] + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 # Check that an email is sent - assert not mock_mail.called + assert not mock_send_grid.called - @mock.patch('api.requests.serializers.send_mail') - def test_email_not_sent_outside_institution(self, mock_mail, app, project, institutional_admin, url, + def test_email_not_sent_outside_institution(self, mock_send_grid, app, project, institutional_admin, url, create_payload, user_without_affiliation, institution): """ Test that you are prevented from requesting a user with the correct institutional affiliation. """ create_payload['data']['relationships']['message_recipient']['data']['id'] = user_without_affiliation._id + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth, expect_errors=True) assert res.status_code == 403 assert f'User {user_without_affiliation._id} is not affiliated with the institution.' in res.json['errors'][0]['detail'] # Check that an email is sent - assert not mock_mail.called + assert not mock_send_grid.called - @mock.patch('api.requests.serializers.send_mail') def test_email_sent_on_creation( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -298,31 +248,15 @@ def test_email_sent_on_creation( """ Test that an email is sent to the appropriate recipients when an institutional access request is made. 
""" + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert mock_send_grid.call_count == 1 - @mock.patch('api.requests.serializers.send_mail') def test_bcc_institutional_admin( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -335,32 +269,15 @@ def test_bcc_institutional_admin( Ensure BCC option works as expected, sending messages to sender giving them a copy for themselves. """ create_payload['data']['attributes']['bcc_sender'] = True - + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=[institutional_admin.username], - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert mock_send_grid.call_count == 1 - @mock.patch('api.requests.serializers.send_mail') def test_reply_to_institutional_admin( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -373,27 +290,11 @@ def test_reply_to_institutional_admin( Ensure reply-to option works as expected, allowing a reply to header be added to the email. 
""" create_payload['data']['attributes']['reply_to'] = True - + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=institutional_admin.username, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert mock_send_grid.call_count == 1 def test_access_requests_disabled_raises_permission_denied( self, app, node_with_disabled_access_requests, user_with_affiliation, institutional_admin, create_payload @@ -410,10 +311,9 @@ def test_access_requests_disabled_raises_permission_denied( assert res.status_code == 403 assert f"{node_with_disabled_access_requests._id} does not have Access Requests enabled" in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.send_mail') def test_placeholder_text_when_comment_is_empty( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -427,24 +327,11 @@ def test_placeholder_text_when_comment_is_empty( """ # Test with empty comment create_payload['data']['attributes']['comment'] = '' + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT, - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + mock_send_grid.assert_called() def test_requester_can_resubmit(self, app, project, institutional_admin, url, create_payload): """ diff --git a/api_tests/requests/views/test_node_request_list.py b/api_tests/requests/views/test_node_request_list.py index ec62194b466..41ee66747d4 100644 --- a/api_tests/requests/views/test_node_request_list.py +++ b/api_tests/requests/views/test_node_request_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -9,6 +8,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestNodeRequestListCreate(NodeRequestTestMixin): @pytest.fixture() def url(self, project): @@ -80,25 +80,25 @@ def test_requests_disabled_list(self, app, url, create_payload, project, admin): res = app.get(url, create_payload, auth=admin.auth, expect_errors=True) assert res.status_code == 403 - @mock.patch('website.mails.mails.send_mail') - def test_email_sent_to_all_admins_on_submit(self, mock_mail, app, project, noncontrib, url, create_payload, second_admin): + def test_email_sent_to_all_admins_on_submit(self, mock_send_grid, app, project, noncontrib, url, create_payload, second_admin): project.is_public = True project.save() + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=noncontrib.auth) assert res.status_code == 201 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - @mock.patch('website.mails.mails.send_mail') - def test_email_not_sent_to_parent_admins_on_submit(self, mock_mail, app, project, 
noncontrib, url, create_payload, second_admin): + def test_email_not_sent_to_parent_admins_on_submit(self, mock_send_grid, app, project, noncontrib, url, create_payload, second_admin): component = NodeFactory(parent=project, creator=second_admin) component.is_public = True project.save() url = f'/{API_BASE}nodes/{component._id}/requests/' + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=noncontrib.auth) assert res.status_code == 201 assert component.parent_admin_contributors.count() == 1 assert component.contributors.count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_request_followed_by_added_as_contrib(elf, app, project, noncontrib, admin, url, create_payload): res = app.post_json_api(url, create_payload, auth=noncontrib.auth) diff --git a/api_tests/requests/views/test_preprint_request_list.py b/api_tests/requests/views/test_preprint_request_list.py index d23736aa312..72e16862f7a 100644 --- a/api_tests/requests/views/test_preprint_request_list.py +++ b/api_tests/requests/views/test_preprint_request_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -6,6 +5,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestPreprintRequestListCreate(PreprintRequestTestMixin): def url(self, preprint): return f'/{API_BASE}preprints/{preprint._id}/requests/' @@ -65,8 +65,7 @@ def test_requester_cannot_submit_again(self, app, admin, create_payload, pre_mod assert res.json['errors'][0]['detail'] == 'Users may not have more than one withdrawal request per preprint.' @pytest.mark.skip('TODO: IN-284 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_sent_to_moderators_on_submit(self, mock_mail, app, admin, create_payload, moderator, post_mod_preprint): + def test_email_sent_to_moderators_on_submit(self, mock_send_grid, app, admin, create_payload, moderator, post_mod_preprint): res = app.post_json_api(self.url(post_mod_preprint), create_payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index 732cbdd83b0..30e579d3ab3 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -8,6 +7,7 @@ @pytest.mark.django_db @pytest.mark.enable_enqueue_task +@pytest.mark.usefixtures('mock_send_grid') class TestCreateNodeRequestAction(NodeRequestTestMixin): @pytest.fixture() def url(self, node_request): @@ -190,8 +190,8 @@ def test_rejects_fail_with_requests_disabled(self, app, admin, url, node_request assert initial_state == node_request.machine_state assert node_request.creator not in node_request.target.contributors - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): + def test_email_sent_on_approve(self, mock_send_grid, app, admin, url, node_request): + mock_send_grid.reset_mock() initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='accept') @@ -200,10 +200,10 @@ def test_email_sent_on_approve(self, mock_mail, app, admin, url, 
node_request): node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator in node_request.target.contributors - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('website.mails.mails.send_mail') - def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): + def test_email_sent_on_reject(self, mock_send_grid, app, admin, url, node_request): + mock_send_grid.reset_mock() initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='reject') @@ -212,10 +212,10 @@ def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator not in node_request.target.contributors - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('website.mails.mails.send_mail') - def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_request): + def test_email_not_sent_on_reject(self, mock_send_grid, app, requester, url, node_request): + mock_send_grid.reset_mock() initial_state = node_request.machine_state initial_comment = node_request.comment payload = self.create_payload(node_request._id, trigger='edit_comment', comment='ASDFG') @@ -224,7 +224,7 @@ def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_req node_request.reload() assert initial_state == node_request.machine_state assert initial_comment != node_request.comment - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 def test_set_permissions_on_approve(self, app, admin, url, node_request): assert node_request.creator not in node_request.target.contributors @@ -255,6 +255,7 @@ def test_accept_request_defaults_to_read_and_visible(self, app, admin, url, node @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCreatePreprintRequestAction(PreprintRequestTestMixin): @pytest.fixture() def url(self, pre_request, post_request, none_request): @@ -384,8 +385,8 @@ def test_write_contrib_and_noncontrib_cannot_edit_comment(self, app, write_contr assert initial_state == request.machine_state assert initial_comment == request.comment - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_sent_on_approve(self, mock_send_grid, app, moderator, url, pre_request, post_request): + mock_send_grid.reset_mock() for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -397,11 +398,10 @@ def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request assert initial_state != request.machine_state assert request.target.is_retracted # There are two preprints withdrawn and each preprint have 2 contributors. So 4 emails are sent in total. 
- assert mock_mail.call_count == 4 + assert mock_send_grid.call_count == 4 @pytest.mark.skip('TODO: IN-331 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_sent_on_reject(self, mock_send_grid, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -411,11 +411,10 @@ def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, request.reload() assert initial_state != request.machine_state assert not request.target.is_retracted - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 @pytest.mark.skip('TODO: IN-284/331 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_not_sent_on_edit_comment(self, mock_send_grid, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -425,7 +424,7 @@ def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pr request.reload() assert initial_state != request.machine_state assert not request.target.is_retracted - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 def test_auto_approve(self, app, auto_withdrawable_pre_mod_preprint, auto_approved_pre_request): assert auto_withdrawable_pre_mod_preprint.is_retracted diff --git a/api_tests/sparse/test_sparse_node_list.py b/api_tests/sparse/test_sparse_node_list.py index 8df12d60e4c..8673f074d74 100644 --- a/api_tests/sparse/test_sparse_node_list.py +++ b/api_tests/sparse/test_sparse_node_list.py @@ -3,14 +3,12 @@ from api.base.settings.defaults import API_BASE from framework.auth.core import Auth from osf.models import AbstractNode -from osf.utils import permissions from osf_tests.factories import ( CollectionFactory, ProjectFactory, AuthUserFactory, PreprintFactory, InstitutionFactory, - OSFGroupFactory, DraftNodeFactory, ) from website.views import find_bookmark_collection @@ -111,14 +109,6 @@ def test_return( assert private_project._id not in ids assert draft_node._id not in ids - # test_returns_nodes_through_which_you_have_perms_through_osf_groups - group = OSFGroupFactory(creator=user) - another_project = ProjectFactory() - another_project.add_osf_group(group, permissions.READ) - res = app.get(sparse_url, auth=user.auth) - ids = [each['id'] for each in res.json['data']] - assert another_project._id in ids - def test_node_list_has_proper_root(self, app, user, sparse_url): project_one = ProjectFactory(title='Project One', is_public=True) ProjectFactory(parent=project_one, is_public=True) diff --git a/api_tests/subscriptions/views/test_subscriptions_detail.py b/api_tests/subscriptions/views/test_subscriptions_detail.py index 2a8741fc173..0e2fa22b119 100644 --- a/api_tests/subscriptions/views/test_subscriptions_detail.py +++ b/api_tests/subscriptions/views/test_subscriptions_detail.py @@ -1,8 +1,10 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, NotificationSubscriptionFactory - +from osf_tests.factories import ( + AuthUserFactory, + NotificationSubscriptionFactory +) @pytest.mark.django_db class TestSubscriptionDetail: @@ -16,18 +18,16 @@ def 
user_no_auth(self): return AuthUserFactory() @pytest.fixture() - def global_user_notification(self, user): - notification = NotificationSubscriptionFactory(_id=f'{user._id}_global', user=user, event_name='global') - notification.add_user_to_subscription(user, 'email_transactional') - return notification + def notification(self, user): + return NotificationSubscriptionFactory(user=user) @pytest.fixture() - def url(self, global_user_notification): - return f'/{API_BASE}subscriptions/{global_user_notification._id}/' + def url(self, notification): + return f'/{API_BASE}subscriptions/{notification._id}/' @pytest.fixture() def url_invalid(self): - return '/{}subscriptions/{}/'.format(API_BASE, 'invalid-notification-id') + return f'/{API_BASE}subscriptions/invalid-notification-id/' @pytest.fixture() def payload(self): @@ -51,56 +51,99 @@ def payload_invalid(self): } } - def test_subscription_detail(self, app, user, user_no_auth, global_user_notification, url, url_invalid, payload, payload_invalid): - # GET with valid notification_id - # Invalid user - res = app.get(url, auth=user_no_auth.auth, expect_errors=True) + def test_subscription_detail_invalid_user(self, app, user, user_no_auth, notification, url, payload): + res = app.get( + url, + auth=user_no_auth.auth, + expect_errors=True + ) assert res.status_code == 403 - # No user - res = app.get(url, expect_errors=True) + + def test_subscription_detail_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get( + url, + expect_errors=True + ) assert res.status_code == 401 - # Valid user + + def test_subscription_detail_valid_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get(url, auth=user.auth) notification_id = res.json['data']['id'] assert res.status_code == 200 assert notification_id == f'{user._id}_global' - # GET with invalid notification_id - # No user + def test_subscription_detail_invalid_notification_id_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.get(url_invalid, expect_errors=True) assert res.status_code == 404 - # Existing user - res = app.get(url_invalid, auth=user.auth, expect_errors=True) + + def test_subscription_detail_invalid_notification_id_existing_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get( + url_invalid, + auth=user.auth, + expect_errors=True + ) assert res.status_code == 404 - # PATCH with valid notification_id and invalid data - # Invalid user + def test_subscription_detail_invalid_payload_403( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload_invalid, auth=user_no_auth.auth, expect_errors=True) assert res.status_code == 403 - # No user + + def test_subscription_detail_invalid_payload_401( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload_invalid, expect_errors=True) assert res.status_code == 401 - # Valid user - res = app.patch_json_api(url, payload_invalid, auth=user.auth, expect_errors=True) + + def test_subscription_detail_invalid_payload_400( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.patch_json_api( + url, + payload_invalid, + auth=user.auth, + expect_errors=True + ) assert res.status_code == 400 - assert 
res.json['errors'][0]['detail'] == 'Invalid frequency "invalid-frequency"' + assert res.json['errors'][0]['detail'] == ('"invalid-frequency" is not a valid choice.') - # PATCH with invalid notification_id - # No user + def test_subscription_detail_patch_invalid_notification_id_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url_invalid, payload, expect_errors=True) assert res.status_code == 404 - # Existing user + + def test_subscription_detail_patch_invalid_notification_id_existing_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url_invalid, payload, auth=user.auth, expect_errors=True) assert res.status_code == 404 - # PATCH with valid notification_id and valid data - # Invalid user + def test_subscription_detail_patch_invalid_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload, auth=user_no_auth.auth, expect_errors=True) assert res.status_code == 403 - # No user + + def test_subscription_detail_patch_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload, expect_errors=True) assert res.status_code == 401 - # Valid user + + def test_subscription_detail_patch( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['attributes']['frequency'] == 'none' diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index f1131b1fa72..a0a01bf513c 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -1,7 +1,13 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, PreprintProviderFactory, ProjectFactory, NotificationSubscriptionFactory +from osf.models import NotificationType +from osf_tests.factories import ( + AuthUserFactory, + PreprintProviderFactory, + ProjectFactory, + NotificationSubscriptionFactory +) @pytest.mark.django_db @@ -23,22 +29,48 @@ def node(self, user): @pytest.fixture() def global_user_notification(self, user): - notification = NotificationSubscriptionFactory(_id=f'{user._id}_global', user=user, event_name='global') - notification.add_user_to_subscription(user, 'email_transactional') - return notification + return NotificationSubscriptionFactory( + notification_type=NotificationType.Type.USER_FILE_UPDATED.instance, + user=user, + ) + + @pytest.fixture() + def file_updated_notification(self, node, user): + return NotificationSubscriptionFactory( + notification_type=NotificationType.Type.NODE_FILES_UPDATED.instance, + subscribed_object=node, + user=user, + ) + + @pytest.fixture() + def provider_notification(self, provider, user): + return NotificationSubscriptionFactory( + notification_type=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.instance, + subscribed_object=provider, + user=user, + ) @pytest.fixture() def url(self, user, node): return f'/{API_BASE}subscriptions/' - def test_list_complete(self, app, user, provider, node, global_user_notification, url): + def test_list_complete( + self, + app, + user, + provider, + node, + global_user_notification, + provider_notification, + 
file_updated_notification, + url + ): res = app.get(url, auth=user.auth) notification_ids = [item['id'] for item in res.json['data']] - # There should only be 4 notifications: users' global, node's comments, node's file updates and provider's preprint added. - assert len(notification_ids) == 4 + # There should only be 3 notifications: users' global, node's file updates and provider's preprint added. + assert len(notification_ids) == 3 assert f'{user._id}_global' in notification_ids assert f'{provider._id}_new_pending_submissions' in notification_ids - assert f'{node._id}_comments' in notification_ids assert f'{node._id}_file_updated' in notification_ids def test_unauthenticated(self, app, url): @@ -55,8 +87,8 @@ def test_cannot_post_patch_put_or_delete(self, app, url, user): assert put_res.status_code == 405 assert delete_res.status_code == 405 - def test_multiple_values_filter(self, app, url, global_user_notification, user): - res = app.get(url + '?filter[event_name]=comments,global', auth=user.auth) + def test_multiple_values_filter(self, app, url, global_user_notification, file_updated_notification, user): + res = app.get(url + '?filter[event_name]=comments,file_updated', auth=user.auth) assert len(res.json['data']) == 2 for subscription in res.json['data']: subscription['attributes']['event_name'] in ['global', 'comments'] diff --git a/api_tests/users/serializers/test_serializers.py b/api_tests/users/serializers/test_serializers.py index f9e2dbb9cff..6d311c776b2 100644 --- a/api_tests/users/serializers/test_serializers.py +++ b/api_tests/users/serializers/test_serializers.py @@ -9,7 +9,6 @@ PreprintFactory, ProjectFactory, InstitutionFactory, - OSFGroupFactory, ) from tests.utils import make_drf_request_with_version from django.utils import timezone @@ -92,16 +91,6 @@ def public_project(user): def deleted_project(user): return ProjectFactory(creator=user, is_deleted=True) -@pytest.fixture() -def group(user): - return OSFGroupFactory(creator=user, name='Platform') - -@pytest.fixture() -def group_project(group): - project = ProjectFactory() - project.add_osf_group(group) - return project - def pytest_generate_tests(metafunc): # called once per each test function @@ -119,7 +108,7 @@ class TestUserSerializer: 'test_related_counts_equal_related_views': [{ 'field_name': 'nodes', 'expected_count': { - 'user': 5, # this counts the private nodes created by RegistrationFactory + 'user': 4, # this counts the private nodes created by RegistrationFactory 'other_user': 1, 'no_auth': 1 }, @@ -192,7 +181,6 @@ def test_user_serializer(self, user): assert 'institutions' in relationships assert 'preprints' in relationships assert 'registrations' in relationships - assert 'groups' in relationships def test_related_counts_equal_related_views(self, request, @@ -210,9 +198,7 @@ def test_related_counts_equal_related_views(self, private_preprint, withdrawn_preprint, unpublished_preprint, # not in the view/related counts by default - deleted_preprint, - group, - group_project): + deleted_preprint): view_count = self.get_view_count(user, field_name, auth=user) related_count = self.get_related_count(user, field_name, auth=user) diff --git a/api_tests/users/views/test_user_claim.py b/api_tests/users/views/test_user_claim.py index 68e6cfd52dd..0e265021c5c 100644 --- a/api_tests/users/views/test_user_claim.py +++ b/api_tests/users/views/test_user_claim.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from django.utils import timezone @@ -13,13 +12,9 @@ ) @pytest.mark.django_db 
+@pytest.mark.usefixtures('mock_send_grid') class TestClaimUser: - @pytest.fixture - def mock_mail(self): - with mock.patch('website.project.views.contributor.mails.send_mail') as patch: - yield patch - @pytest.fixture() def referrer(self): return AuthUserFactory() @@ -121,37 +116,41 @@ def test_claim_unauth_failure(self, app, url, unreg_user, project, wrong_preprin ) assert res.status_code == 401 - def test_claim_unauth_success_with_original_email(self, app, url, project, unreg_user, mock_mail): + def test_claim_unauth_success_with_original_email(self, app, url, project, unreg_user, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email='david@david.son', id=project._id), ) assert res.status_code == 204 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - def test_claim_unauth_success_with_claimer_email(self, app, url, unreg_user, project, claimer, mock_mail): + def test_claim_unauth_success_with_claimer_email(self, app, url, unreg_user, project, claimer, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email=claimer.username, id=project._id) ) assert res.status_code == 204 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - def test_claim_unauth_success_with_unknown_email(self, app, url, project, unreg_user, mock_mail): + def test_claim_unauth_success_with_unknown_email(self, app, url, project, unreg_user, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email='asdf@fdsa.com', id=project._id), ) assert res.status_code == 204 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - def test_claim_unauth_success_with_preprint_id(self, app, url, preprint, unreg_user, mock_mail): + def test_claim_unauth_success_with_preprint_id(self, app, url, preprint, unreg_user, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email='david@david.son', id=preprint._id), ) assert res.status_code == 204 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, unreg_user, referrer): _url = url.format(unreg_user._id) @@ -210,9 +209,10 @@ def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, un ) assert res.status_code == 403 - def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, mock_mail): + def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, mock_send_grid): unreg_user.unclaimed_records[project._id]['last_sent'] = timezone.now() unreg_user.save() + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(id=project._id), @@ -221,13 +221,14 @@ def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, ) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'User account can only be claimed with an existing user once every 24 hours' - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - def test_claim_auth_success(self, app, url, claimer, unreg_user, project, mock_mail): + def test_claim_auth_success(self, app, url, claimer, unreg_user, project, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(id=project._id), auth=claimer.auth ) assert res.status_code == 
204 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index 5649411c551..32cc69758d4 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -14,10 +14,8 @@ from osf_tests.factories import ( AuthUserFactory, UserFactory, - OSFGroupFactory, ProjectFactory, ApiOAuth2ScopeFactory, - RegistrationFactory, Auth, ) from osf.utils.permissions import CREATOR_PERMISSIONS @@ -100,47 +98,6 @@ def test_find_no_user_in_users(self, app, user_one, user_two): assert user_one._id not in ids assert user_two._id not in ids - def test_more_than_one_projects_in_common(self, app, user_one, user_two): - group = OSFGroupFactory(creator=user_one) - group.make_member(user_two) - - project1 = ProjectFactory(creator=user_one) - project1.add_contributor( - contributor=user_two, - permissions=CREATOR_PERMISSIONS, - auth=Auth(user=user_one) - ) - project1.save() - project2 = ProjectFactory(creator=user_one) - project2.add_contributor( - contributor=user_two, - permissions=CREATOR_PERMISSIONS, - auth=Auth(user=user_one) - ) - project2.save() - - project3 = ProjectFactory() - project4 = ProjectFactory() - project3.add_osf_group(group) - project4.add_osf_group(group) - project4.is_deleted = True - project3.save() - project4.save() - - RegistrationFactory( - project=project1, - creator=user_one, - is_public=True) - - url = f'/{API_BASE}users/?show_projects_in_common=true' - res = app.get(url, auth=user_two.auth) - user_json = res.json['data'] - for user in user_json: - if user['id'] == user_two._id: - meta = user['relationships']['nodes']['links']['related']['meta'] - assert 'projects_in_common' in meta - assert meta['projects_in_common'] == 4 - def test_users_projects_in_common(self, app, user_one, user_two): user_one.fullname = 'hello' user_one.save() @@ -289,6 +246,7 @@ def test_users_list_filter_multiple_fields_with_bad_filter( @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUsersCreate: @pytest.fixture() @@ -320,9 +278,8 @@ def tearDown(self, app): app.reset() # clears cookies OSFUser.remove() - @mock.patch('framework.auth.views.mails.send_mail') def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( - self, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_send_grid, app, user, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 res = app.post_json_api( f'{url_base}?send_email=true', @@ -333,11 +290,10 @@ def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( assert res.status_code == 403 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') def test_logged_out_user_cannot_create_other_user_or_send_mail( - self, mock_mail, app, email_unconfirmed, data, url_base): + self, mock_send_grid, app, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 res = app.post_json_api( f'{url_base}?send_email=true', @@ -347,12 +303,11 @@ def test_logged_out_user_cannot_create_other_user_or_send_mail( assert res.status_code == 401 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision - 
@mock.patch('framework.auth.views.mails.send_mail') def test_cookied_requests_can_create_and_email( - self, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_send_grid, app, user, email_unconfirmed, data, url_base): # NOTE: skipped tests are not tested during session refactor, only updated to fix import session = SessionStore() session['auth_user_id'] = user._id @@ -367,17 +322,16 @@ def test_cookied_requests_can_create_and_email( ) assert res.status_code == 201 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_can_create_and_send_email( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -407,17 +361,16 @@ def test_properly_scoped_token_can_create_and_send_email( assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_does_not_send_email_without_kwarg( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -449,17 +402,16 @@ def test_properly_scoped_token_does_not_send_email_without_kwarg( assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_can_create_without_username_but_not_send_email( - self, mock_auth, mock_mail, app, user, data, url_base): + self, mock_auth, mock_send_grid, app, user, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -495,12 +447,11 @@ def test_properly_scoped_token_can_create_without_username_but_not_send_email( except ValueError: raise AssertionError('Username is not a valid UUID') assert OSFUser.objects.filter(fullname='No Email').count() == 1 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') 
@mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') def test_improperly_scoped_token_can_not_create_or_email( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Unauthorized Token', @@ -532,17 +483,16 @@ def test_improperly_scoped_token_can_not_create_or_email( assert res.status_code == 403 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.admin unavailable') def test_admin_scoped_token_can_create_and_send_email( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Admin Token', @@ -572,4 +522,4 @@ def test_admin_scoped_token_can_create_and_send_email( assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 diff --git a/api_tests/users/views/test_user_message_institutional_access.py b/api_tests/users/views/test_user_message_institutional_access.py index 36f2a59e252..2f60c4ae726 100644 --- a/api_tests/users/views/test_user_message_institutional_access.py +++ b/api_tests/users/views/test_user_message_institutional_access.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from osf.models.user_message import MessageTypes, UserMessage from api.base.settings.defaults import API_BASE @@ -6,11 +5,11 @@ AuthUserFactory, InstitutionFactory ) -from website.mails import USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST from webtest import AppError @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUserMessageInstitutionalAccess: """ Tests for `UserMessage`. @@ -85,12 +84,10 @@ def payload(self, institution, user): } } - @mock.patch('osf.models.user_message.send_mail') - def test_institutional_admin_can_create_message(self, mock_send_mail, app, institutional_admin, institution, url_with_affiliation, payload): + def test_institutional_admin_can_create_message(self, mock_send_grid, app, institutional_admin, institution, url_with_affiliation, payload): """ Ensure an institutional admin can create a `UserMessage` with a `message` and `institution`. 
""" - mock_send_mail.return_value = mock.MagicMock() res = app.post_json_api( url_with_affiliation, @@ -105,19 +102,16 @@ def test_institutional_admin_can_create_message(self, mock_send_mail, app, insti assert user_message.message_text == payload['data']['attributes']['message_text'] assert user_message.institution == institution - mock_send_mail.assert_called_once() - assert mock_send_mail.call_args[1]['to_addr'] == user_message.recipient.username - assert 'Requesting user access for collaboration' in mock_send_mail.call_args[1]['message_text'] + mock_send_grid.assert_called_once() + assert mock_send_grid.call_args[1]['to_addr'] == user_message.recipient.username assert user_message._id == data['id'] - @mock.patch('osf.models.user_message.send_mail') - def test_institutional_admin_can_not_create_message(self, mock_send_mail, app, institutional_admin_on_institution_without_access, + def test_institutional_admin_can_not_create_message(self, mock_send_grid, app, institutional_admin_on_institution_without_access, institution_without_access, url_with_affiliation_on_institution_without_access, payload): """ Ensure an institutional admin cannot create a `UserMessage` with a `message` and `institution` witch has 'institutional_request_access_enabled' as False """ - mock_send_mail.return_value = mock.MagicMock() # Use pytest.raises to explicitly expect the 403 error with pytest.raises(AppError) as exc_info: @@ -197,10 +191,9 @@ def test_admin_cannot_message_user_outside_institution( assert ('Cannot send to a recipient that is not affiliated with the provided institution.' in res.json['errors'][0]['detail']['user']) - @mock.patch('osf.models.user_message.send_mail') def test_cc_institutional_admin( self, - mock_send_mail, + mock_send_grid, app, institutional_admin, institution, @@ -211,7 +204,6 @@ def test_cc_institutional_admin( """ Ensure CC option works as expected, sending messages to all institutional admins except the sender. """ - mock_send_mail.return_value = mock.MagicMock() # Enable CC in the payload payload['data']['attributes']['bcc_sender'] = True @@ -227,20 +219,9 @@ def test_cc_institutional_admin( assert user_message.is_sender_BCCed # Two emails are sent during the CC but this is how the mock works `send_email` is called once. - mock_send_mail.assert_called_once_with( - to_addr=user_with_affiliation.username, - bcc_addr=[institutional_admin.username], - reply_to=None, - message_text='Requesting user access for collaboration', - mail=USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - sender=institutional_admin, - recipient=user_with_affiliation, - institution=institution, - ) + assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username - @mock.patch('osf.models.user_message.send_mail') - def test_cc_field_defaults_to_false(self, mock_send_mail, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): + def test_cc_field_defaults_to_false(self, mock_send_grid, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): """ Ensure the `cc` field defaults to `false` when not provided in the payload. 
""" @@ -249,20 +230,10 @@ def test_cc_field_defaults_to_false(self, mock_send_mail, app, institutional_adm user_message = UserMessage.objects.get(sender=institutional_admin) assert user_message.message_text == payload['data']['attributes']['message_text'] - mock_send_mail.assert_called_once_with( - to_addr=user_with_affiliation.username, - bcc_addr=None, - reply_to=None, - message_text='Requesting user access for collaboration', - mail=USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - sender=institutional_admin, - recipient=user_with_affiliation, - institution=institution, - ) - @mock.patch('osf.models.user_message.send_mail') - def test_reply_to_header_set(self, mock_send_mail, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): + assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username + + def test_reply_to_header_set(self, mock_send_grid, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): """ Ensure that the 'Reply-To' header is correctly set to the sender's email address. """ @@ -275,14 +246,4 @@ def test_reply_to_header_set(self, mock_send_mail, app, institutional_admin, use ) assert res.status_code == 201 - mock_send_mail.assert_called_once_with( - to_addr=user_with_affiliation.username, - bcc_addr=None, - reply_to=institutional_admin.username, - message_text='Requesting user access for collaboration', - mail=USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - sender=institutional_admin, - recipient=user_with_affiliation, - institution=institution, - ) + assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username diff --git a/api_tests/users/views/test_user_nodes_list.py b/api_tests/users/views/test_user_nodes_list.py index 8d39119e387..99deaf40a1e 100644 --- a/api_tests/users/views/test_user_nodes_list.py +++ b/api_tests/users/views/test_user_nodes_list.py @@ -7,7 +7,6 @@ from osf_tests.factories import ( AuthUserFactory, CollectionFactory, - OSFGroupFactory, PreprintFactory, ProjectFactory, RegistrationFactory, @@ -175,24 +174,6 @@ def test_user_nodes( assert public_project_user_one._id == ids[1] assert private_project_user_one._id == ids[0] - # test_osf_group_member_node_shows_up_in_user_nodes - group_mem = AuthUserFactory() - url = f'/{API_BASE}users/{group_mem._id}/nodes/' - res = app.get(url, auth=group_mem.auth) - assert len(res.json['data']) == 0 - - group = OSFGroupFactory(creator=group_mem) - private_project_user_one.add_osf_group(group, permissions.READ) - res = app.get(url, auth=group_mem.auth) - assert len(res.json['data']) == 1 - - res = app.get(url, auth=user_one.auth) - assert len(res.json['data']) == 1 - - private_project_user_one.delete() - res = app.get(url, auth=user_one.auth) - assert len(res.json['data']) == 0 - @pytest.mark.django_db class TestUserNodesPreprintsFiltering: @@ -320,27 +301,6 @@ def test_current_user_permissions_filter(self, app, url, contrib, no_perm_node, res = app.get(f'{url}null', auth=contrib.auth, expect_errors=True) assert res.status_code == 400 - user2 = AuthUserFactory() - osf_group = OSFGroupFactory(creator=user2) - read_node.add_osf_group(osf_group, permissions.READ) - write_node.add_osf_group(osf_group, permissions.WRITE) - admin_node.add_osf_group(osf_group, permissions.ADMIN) - - # test filter group member read - res = app.get(f'{url}read', auth=user2.auth) - assert len(res.json['data']) == 3 - assert {read_node._id, write_node._id, admin_node._id} == 
{node['id'] for node in res.json['data']} - - # test filter group member write - res = app.get(f'{url}write', auth=user2.auth) - assert len(res.json['data']) == 2 - assert {admin_node._id, write_node._id} == {node['id'] for node in res.json['data']} - - # test filter group member admin - res = app.get(f'{url}admin', auth=user2.auth) - assert len(res.json['data']) == 1 - assert [admin_node._id] == [node['id'] for node in res.json['data']] - def test_filter_my_current_user_permissions_to_other_users_nodes(self, app, contrib, no_perm_node, read_node, write_node, admin_node): url = f'/{API_BASE}users/{contrib._id}/nodes/?filter[current_user_permissions]=' diff --git a/api_tests/users/views/test_user_osf_groups_list.py b/api_tests/users/views/test_user_osf_groups_list.py deleted file mode 100644 index 47c92e726d2..00000000000 --- a/api_tests/users/views/test_user_osf_groups_list.py +++ /dev/null @@ -1,119 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from api.base.settings.defaults import API_BASE -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(manager, member): - group = OSFGroupFactory(name='Platform Team', creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def second_osf_group(manager, member): - group = OSFGroupFactory(name='Interfaces Team', creator=manager) - return group - - -@pytest.mark.django_db -class TestUserGroupList: - - @pytest.fixture() - def manager_url(self, manager): - return f'/{API_BASE}users/{manager._id}/groups/' - - @pytest.fixture() - def member_url(self, member): - return f'/{API_BASE}users/{member._id}/groups/' - - def test_return_manager_groups(self, app, member, manager, user, osf_group, second_osf_group, manager_url): - with override_flag(OSF_GROUPS, active=True): - # test nonauthenticated - res = app.get(manager_url) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 0 - - # test authenticated user - res = app.get(manager_url, auth=user.auth) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 0 - - # test authenticated member - res = app.get(manager_url, auth=member.auth) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 1 - - # test authenticated manager - res = app.get(manager_url, auth=manager.auth) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 2 - assert osf_group._id in ids - assert second_osf_group._id in ids - - def test_groups_filter(self, app, member, manager, user, osf_group, second_osf_group, manager_url): - with override_flag(OSF_GROUPS, active=True): - res = app.get(manager_url + '?filter[name]=Platform', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - - res = app.get(manager_url + '?filter[name]=Apple', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - res = app.get(manager_url + '?filter[bad_field]=Apple', auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - - def test_return_member_groups(self, app, member, manager, user, 
osf_group, second_osf_group, member_url): - with override_flag(OSF_GROUPS, active=True): - # test nonauthenticated - res = app.get(member_url) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated user - res = app.get(member_url, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated member - res = app.get(member_url, auth=member.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - - # test authenticated manager - res = app.get(member_url, auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - assert data[0]['type'] == 'groups' - assert data[0]['attributes']['name'] == osf_group.name diff --git a/api_tests/users/views/test_user_registrations_list.py b/api_tests/users/views/test_user_registrations_list.py index 740d0263038..1ad5d0701ee 100644 --- a/api_tests/users/views/test_user_registrations_list.py +++ b/api_tests/users/views/test_user_registrations_list.py @@ -9,9 +9,7 @@ CollectionFactory, ProjectFactory, RegistrationFactory, - OSFGroupFactory ) -from osf.utils import permissions from tests.base import ApiTestCase from website.views import find_bookmark_collection @@ -30,14 +28,6 @@ def user_one(self): def user_two(self): return AuthUserFactory() - @pytest.fixture() - def group_member(self): - return AuthUserFactory() - - @pytest.fixture() - def osf_group(self, group_member): - return OSFGroupFactory(creator=group_member) - @pytest.fixture() def project_public_user_one(self, user_one): return ProjectFactory( @@ -66,16 +56,6 @@ def project_private_user_two(self, user_two): is_public=False, creator=user_two) - @pytest.fixture() - def project_private_group_member(self, user_one, osf_group): - project = ProjectFactory( - title='Private Project Group Member', - is_public=False, - creator=user_one - ) - project.add_osf_group(osf_group, permissions.ADMIN) - return project - @pytest.fixture() def project_deleted_user_one(self, user_one): return CollectionFactory( @@ -136,12 +116,11 @@ def reg_project_private_group_member(self, user_one, project_private_group_membe is_private=True) def test_user_registrations( - self, app, user_one, user_two, group_member, + self, app, user_one, user_two, reg_project_public_user_one, reg_project_public_user_two, reg_project_private_user_one, reg_project_private_user_two, - reg_project_private_group_member, folder, folder_deleted, project_deleted_user_one): @@ -199,22 +178,6 @@ def test_user_registrations( assert folder_deleted._id not in ids assert project_deleted_user_one._id not in ids - # test_get_registrations_logged_in_group_member - url = f'/{API_BASE}users/{group_member._id}/registrations/' - res = app.get(url, auth=group_member.auth) - node_json = res.json['data'] - - ids = [each['id'] for each in node_json] - assert reg_project_public_user_one._id not in ids - assert reg_project_private_user_one._id not in ids - assert reg_project_public_user_two._id not in ids - assert reg_project_private_user_two._id not in ids - assert folder._id not in ids - assert folder_deleted._id not in ids - assert project_deleted_user_one._id not in ids - # project group members not copied to registration. 
- assert reg_project_private_group_member not in ids - class TestRegistrationListFiltering( RegistrationListFilteringMixin, diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index cd4e25ff654..ec60c1f4c3d 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -12,7 +12,6 @@ from django.middleware import csrf from osf.models import Email, NotableDomain from framework.auth.views import auth_email_logout -from website import mails, settings @pytest.fixture() def user_one(): @@ -29,6 +28,7 @@ def unconfirmed_address(): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUserRequestExport: @pytest.fixture() @@ -48,8 +48,7 @@ def test_get(self, app, user_one, url): res = app.get(url, auth=user_one.auth, expect_errors=True) assert res.status_code == 405 - @mock.patch('framework.auth.views.mails.send_mail') - def test_post(self, mock_mail, app, user_one, user_two, url, payload): + def test_post(self, mock_send_grid, app, user_one, user_two, url, payload): # Logged out res = app.post_json_api(url, payload, expect_errors=True) assert res.status_code == 401 @@ -64,20 +63,18 @@ def test_post(self, mock_mail, app, user_one, user_two, url, payload): assert res.status_code == 204 user_one.reload() assert user_one.email_last_sent is not None - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') - def test_post_invalid_type(self, mock_mail, app, user_one, url, payload): + def test_post_invalid_type(self, mock_send_grid, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' res = app.post_json_api(url, payload, auth=user_one.auth, expect_errors=True) assert res.status_code == 409 user_one.reload() assert user_one.email_last_sent is None - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_exceed_throttle(self, mock_mail, app, user_one, url, payload): + def test_exceed_throttle(self, app, user_one, url, payload): assert user_one.email_last_sent is None res = app.post_json_api(url, payload, auth=user_one.auth) assert res.status_code == 204 @@ -171,6 +168,7 @@ def test_multiple_errors(self, app, user_one, url, payload): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestResetPassword: @pytest.fixture() @@ -189,27 +187,20 @@ def url(self): def csrf_token(self): return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) - def test_get(self, app, url, user_one): + def test_get(self, mock_send_grid, app, url, user_one): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: - res = app.get(url) - assert res.status_code == 200 + res = app.get(url) + assert res.status_code == 200 - user_one.reload() - mock_send_mail.assert_called_with( - to_addr=user_one.username, - mail=mails.FORGOT_PASSWORD, - reset_link=f'{settings.RESET_PASSWORD_URL}{user_one._id}/{user_one.verification_key_v2['token']}/', - can_change_preferences=False, - ) + user_one.reload() + assert mock_send_grid.call_args[1]['to_addr'] == user_one.username - def test_get_invalid_email(self, app, url): + def test_get_invalid_email(self, mock_send_grid, app, url): url = f'{url}?email={'invalid_email'}' - with mock.patch.object(mails, 'send_mail', return_value=None) as 
mock_send_mail: - res = app.get(url) - assert res.status_code == 200 - assert not mock_send_mail.called + res = app.get(url) + assert res.status_code == 200 + assert not mock_send_grid.called def test_post(self, app, url, user_one, csrf_token): app.set_cookie(CSRF_COOKIE_NAME, csrf_token) diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index cf9194409f6..cc02e6ae145 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -227,6 +227,7 @@ def test_unauthorized_patch_403(self, app, url, payload, user_two): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUpdateRequestedDeactivation: @pytest.fixture() @@ -241,8 +242,7 @@ def payload(self, user_one): } } - @mock.patch('framework.auth.views.mails.send_mail') - def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, url, payload): + def test_patch_requested_deactivation(self, app, user_one, user_two, url, payload): # Logged out res = app.patch_json_api(url, payload, expect_errors=True) assert res.status_code == 401 @@ -271,18 +271,16 @@ def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, user_one.reload() assert user_one.requested_deactivation is False - @mock.patch('framework.auth.views.mails.send_mail') - def test_patch_invalid_type(self, mock_mail, app, user_one, url, payload): + def test_patch_invalid_type(self, mock_send_grid, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' res = app.patch_json_api(url, payload, auth=user_one.auth, expect_errors=True) assert res.status_code == 409 user_one.reload() assert user_one.email_last_sent is None - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') - def test_exceed_throttle(self, mock_mail, app, user_one, url, payload): + def test_exceed_throttle(self, app, user_one, url, payload): assert user_one.email_last_sent is None res = app.patch_json_api(url, payload, auth=user_one.auth) assert res.status_code == 200 diff --git a/api_tests/wb/views/test_wb_hooks.py b/api_tests/wb/views/test_wb_hooks.py index 20c09b14e69..1de111b9b82 100644 --- a/api_tests/wb/views/test_wb_hooks.py +++ b/api_tests/wb/views/test_wb_hooks.py @@ -1,6 +1,5 @@ import pytest -from addons.osfstorage.models import OsfStorageFolder from framework.auth import signing from osf_tests.factories import ( @@ -9,26 +8,12 @@ PreprintFactory ) from api_tests.utils import create_test_file, create_test_preprint_file -from osf.models import QuickFilesNode @pytest.fixture() def user(): return AuthUserFactory() -@pytest.fixture() -def quickfiles_node(user): - return QuickFilesNode.objects.get_for_user(user) - -@pytest.fixture() -def quickfiles_file(user, quickfiles_node): - file = create_test_file(quickfiles_node, user, filename='road_dogg.mp3') - return file - -@pytest.fixture() -def quickfiles_folder(quickfiles_node): - return OsfStorageFolder.objects.get_root(target=quickfiles_node) - @pytest.fixture() def node(user): return ProjectFactory(creator=user) @@ -72,10 +57,6 @@ class TestMove(): def move_url(self, node): return f'/_/wb/hooks/{node._id}/move/' - @pytest.fixture() - def quickfiles_move_url(self, quickfiles_node): - return f'/_/wb/hooks/{quickfiles_node._id}/move/' - @pytest.fixture() def payload(self, file, folder, root_node, user): return { @@ -568,10 +549,6 @@ class TestCopy(): def 
copy_url(self, node): return f'/_/wb/hooks/{node._id}/copy/' - @pytest.fixture() - def quickfiles_copy_url(self, quickfiles_node): - return f'/_/wb/hooks/{quickfiles_node._id}/copy/' - @pytest.fixture() def payload(self, file, folder, root_node, user): return { diff --git a/conftest.py b/conftest.py index 562d04aeb27..a65aa7aa50f 100644 --- a/conftest.py +++ b/conftest.py @@ -19,6 +19,10 @@ from osf.external.spam import tasks as spam_tasks from website import settings as website_settings +def pytest_configure(config): + if not os.getenv('GITHUB_ACTIONS') == 'true': + config.option.allow_hosts += ',mailhog' + logger = logging.getLogger(__name__) @@ -358,6 +362,22 @@ def helpful_thing(self): """ yield from rolledback_transaction('function_transaction') +@pytest.fixture() +def mock_send_grid(): + with mock.patch.object(website_settings, 'USE_EMAIL', True): + with mock.patch.object(website_settings, 'USE_CELERY', False): + with mock.patch('framework.email.tasks.send_email') as mock_sendgrid: + mock_sendgrid.return_value = True + yield mock_sendgrid + + +def start_mock_send_grid(test_case): + patcher = mock.patch('framework.email.tasks.send_email') + mocked_send = patcher.start() + test_case.addCleanup(patcher.stop) + mocked_send.return_value = True + return mocked_send + @pytest.fixture def mock_gravy_valet_get_verified_links(): """This fixture is used to mock a GV request which is made during node's identifier update. More specifically, when diff --git a/docker-compose.yml b/docker-compose.yml index 14ed365e611..e9ba66bc37e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -607,3 +607,11 @@ services: stdin_open: true volumes: - /srv + + mailhog: + image: mailhog/mailhog + container_name: mailhog + ports: + - "1025:1025" # SMTP + - "8025:8025" # Web UI + restart: unless-stopped diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py index 65811b9a981..0f3f3c7c1f5 100644 --- a/framework/auth/oauth_scopes.py +++ b/framework/auth/oauth_scopes.py @@ -63,9 +63,6 @@ class CoreScopes: NODE_CONTRIBUTORS_READ = 'nodes.contributors_read' NODE_CONTRIBUTORS_WRITE = 'nodes.contributors_write' - OSF_GROUPS_READ = 'osf_groups.groups_read' - OSF_GROUPS_WRITE = 'osf_groups.groups_write' - PREPRINT_CONTRIBUTORS_READ = 'preprints.contributors_read' PREPRINT_CONTRIBUTORS_WRITE = 'preprints.contributors_write' @@ -90,9 +87,6 @@ class CoreScopes: NODE_PREPRINTS_READ = 'node.preprints_read' NODE_PREPRINTS_WRITE = 'node.preprints_write' - NODE_OSF_GROUPS_READ = 'node.osf_groups_read' - NODE_OSF_GROUPS_WRITE = 'node.osf_groups_write' - PREPRINTS_READ = 'preprint.preprints_read' PREPRINTS_WRITE = 'preprint.preprints_write' @@ -251,10 +245,6 @@ class ComposedScopes: DRAFT_READ = (CoreScopes.NODE_DRAFT_REGISTRATIONS_READ, CoreScopes.DRAFT_REGISTRATIONS_READ, CoreScopes.DRAFT_CONTRIBUTORS_READ) DRAFT_WRITE = (CoreScopes.NODE_DRAFT_REGISTRATIONS_WRITE, CoreScopes.DRAFT_REGISTRATIONS_WRITE, CoreScopes.DRAFT_CONTRIBUTORS_WRITE) - # OSF Groups - GROUP_READ = (CoreScopes.OSF_GROUPS_READ, ) - GROUP_WRITE = (CoreScopes.OSF_GROUPS_WRITE, ) - # Identifier views IDENTIFIERS_READ = (CoreScopes.IDENTIFIERS_READ, ) IDENTIFIERS_WRITE = (CoreScopes.IDENTIFIERS_WRITE, ) @@ -302,7 +292,7 @@ class ComposedScopes: # Privileges relating to who can access a node (via contributors or registrations) NODE_ACCESS_READ = (CoreScopes.NODE_CONTRIBUTORS_READ, CoreScopes.NODE_REGISTRATIONS_READ, CoreScopes.NODE_VIEW_ONLY_LINKS_READ, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_READ, - CoreScopes.NODE_REQUESTS_READ, 
CoreScopes.NODE_SETTINGS_READ, CoreScopes.NODE_OSF_GROUPS_READ)
+                        CoreScopes.NODE_REQUESTS_READ, CoreScopes.NODE_SETTINGS_READ)
     NODE_ACCESS_WRITE = NODE_ACCESS_READ + \
         (
             CoreScopes.NODE_CONTRIBUTORS_WRITE,
@@ -311,7 +301,6 @@ class ComposedScopes:
             CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_WRITE,
             CoreScopes.NODE_REQUESTS_WRITE,
             CoreScopes.NODE_SETTINGS_WRITE,
-            CoreScopes.NODE_OSF_GROUPS_WRITE,
             CoreScopes.SANCTION_RESPONSE,
         )
@@ -341,7 +330,6 @@ class ComposedScopes:
         + DRAFT_READ\
         + REVIEWS_READ\
         + PREPRINT_ALL_READ\
-        + GROUP_READ\
         + (
             CoreScopes.CEDAR_METADATA_RECORD_READ,
             CoreScopes.MEETINGS_READ,
@@ -363,7 +351,6 @@ class ComposedScopes:
         + DRAFT_WRITE\
         + REVIEWS_WRITE\
         + PREPRINT_ALL_WRITE\
-        + GROUP_WRITE\
         + TOKENS_WRITE\
         + APPLICATIONS_WRITE\
         + (
diff --git a/notifications.yaml b/notifications.yaml
new file mode 100644
index 00000000000..a86820be248
--- /dev/null
+++ b/notifications.yaml
@@ -0,0 +1,124 @@
+# This file contains the configuration for our notification system using the NotificationType object; it is intended to
+# exist as a simple declarative list of NotificationTypes and their attributes. Every notification sent by OSF should be
+# represented here for business logic and metrics reasons.
+
+# Workflow:
+# 1. Add a new notification template
+# 2. Add an entry here with the desired notification types
+# 3. Add the name to the Enum osf.notification.NotificationType.Type
+# 4. Use the emit method to send or subscribe the notification for immediate delivery or periodic digest.
+notification_types:
+  #### GLOBAL (User Notifications)
+  - name: user_pending_verification_registered
+    __docs__: ...
+    object_content_type_model_name: osfuser
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+  #### PROVIDER
+  - name: new_pending_submissions
+    __docs__: ...
+    object_content_type_model_name: abstractprovider
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+  - name: new_pending_withdraw_requests
+    __docs__: ...
+    object_content_type_model_name: abstractprovider
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+  #### NODE
+  - name: file_updated
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+  - name: wiki_updated
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+  - name: node_contributor_added_access_request
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+  - name: node_request_institutional_access_request
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+
+  #### PREPRINT
+  - name: pending_retraction_admin
+    __docs__: ...
+    object_content_type_model_name: preprint
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
+
+  #### SUPPORT
+  - name: crossref_error
+    __docs__: ...
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### Collection Submissions + - name: collection_submission_removed_moderator + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_removed_private + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_removed_admin + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_submitted + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_cancel + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_accepted + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_rejected + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### DESK + - name: desk_archive_job_exceeded + __docs__: Archive job failed due to size exceeded. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_copy_error + __docs__: Archive job failed due to copy error. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_file_not_found + __docs__: Archive job failed because files were not found. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_uncaught_error + __docs__: Archive job failed due to an uncaught error. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_osf_support_email + __docs__: ... 
+    object_content_type_model_name: desk
+    template: 'website/templates/emails/new_pending_submissions.html.mako'
+    notification_freq_default: instantly
diff --git a/osf/admin.py b/osf/admin.py
index 71c0ae8172b..b94c168c5be 100644
--- a/osf/admin.py
+++ b/osf/admin.py
@@ -24,18 +24,18 @@ class OSFUserAdmin(admin.ModelAdmin):
 
     def formfield_for_manytomany(self, db_field, request, **kwargs):
         """
-        Restricts preprint/node/osfgroup django groups from showing up in the user's groups list in the admin app
+        Restricts preprint/node django groups from showing up in the user's groups list in the admin app
         """
         if db_field.name == 'groups':
-            kwargs['queryset'] = Group.objects.exclude(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='osfgroup_') | Q(name__startswith='collections_'))
+            kwargs['queryset'] = Group.objects.exclude(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='collections_'))
         return super().formfield_for_manytomany(db_field, request, **kwargs)
 
     def save_related(self, request, form, formsets, change):
         """
-        Since m2m fields overridden with new form data in admin app, preprint groups/node/osfgroup groups (which are now excluded from being selections)
+        Since m2m fields are overridden with new form data in the admin app, preprint/node groups (which are now excluded from being selections)
        are removed. Manually re-adds preprint/node groups after adding new groups in form.
        """
-        groups_to_preserve = list(form.instance.groups.filter(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='osfgroup_') | Q(name__startswith='collections_')))
+        groups_to_preserve = list(form.instance.groups.filter(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='collections_')))
         super().save_related(request, form, formsets, change)
         if 'groups' in form.cleaned_data:
             for group in groups_to_preserve:
diff --git a/osf/email/__init__.py b/osf/email/__init__.py
new file mode 100644
index 00000000000..d8cc1d6de5a
--- /dev/null
+++ b/osf/email/__init__.py
@@ -0,0 +1,68 @@
+import logging
+import smtplib
+from email.mime.text import MIMEText
+from sendgrid import SendGridAPIClient
+from sendgrid.helpers.mail import Mail
+from website import settings
+
+def send_email_over_smtp(to_addr, notification_type, context):
+    """Send an email notification using SMTP. This is typically not used in production, where third-party mail
+    services are preferred. It is intended for tests, staging environments, and other special situations.
+
+    Args:
+        to_addr (str): The recipient's email address.
+        notification_type (NotificationType): The notification type; its template and subject template are rendered with the context.
+        context (dict): The email content context.
+    """
+    if not settings.MAIL_SERVER:
+        raise NotImplementedError('MAIL_SERVER is not set')
+    if not (settings.MAIL_USERNAME and settings.MAIL_PASSWORD):
+        raise NotImplementedError('MAIL_USERNAME and MAIL_PASSWORD are required for SMTP')
+
+    msg = MIMEText(
+        notification_type.template.format(context),
+        'html',
+        _charset='utf-8'
+    )
+    msg['Subject'] = notification_type.email_subject_line_template.format(context=context)
+
+    with smtplib.SMTP(settings.MAIL_SERVER) as server:
+        server.ehlo()
+        server.starttls()
+        server.ehlo()
+        server.login(settings.MAIL_USERNAME, settings.MAIL_PASSWORD)
+        server.sendmail(
+            settings.FROM_EMAIL,
+            [to_addr],
+            msg.as_string()
+        )
+
+
+def send_email_with_send_grid(to_addr, notification_type, context):
+    """Send an email notification using SendGrid.
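+    Requires settings.SENDGRID_API_KEY; non-success responses and any other failure are
+    logged and re-raised to the caller.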
+ + Args: + to_addr (str): The recipient's email address. + notification_type (str): The subject of the notification. + context (dict): The email content context. + """ + if not settings.SENDGRID_API_KEY: + raise NotImplementedError('SENDGRID_API_KEY is required for sendgrid notifications.') + + message = Mail( + from_email=settings.FROM_EMAIL, + to_emails=to_addr, + subject=notification_type, + html_content=context.get('message', '') + ) + + try: + sg = SendGridAPIClient(settings.SENDGRID_API_KEY) + response = sg.send(message) + if response.status_code not in (200, 201, 202): + logging.error(f'SendGrid response error: {response.status_code}, body: {response.body}') + response.raise_for_status() + logging.info(f'Notification email sent to {to_addr} for {notification_type}.') + except Exception as exc: + logging.error(f'Failed to send email notification to {to_addr}: {exc}') + raise exc diff --git a/osf/features.yaml b/osf/features.yaml index 85993ef1ba8..98eff0aef9a 100644 --- a/osf/features.yaml +++ b/osf/features.yaml @@ -242,3 +242,9 @@ switches: name: countedusage_unified_metrics_2024 note: use only `osf.metrics.counted_usage`-based metrics where possible; un-use PageCounter, PreprintView, PreprintDownload, etc active: false + + - flag_name: ENABLE_MAILHOG + name: enable_mailhog + note: This is used to enable the MailHog email testing service, this will allow emails to be sent to the + MailHog service before sending them to real email addresses. + active: false diff --git a/osf/management/commands/add_notification_subscription.py b/osf/management/commands/add_notification_subscription.py index 7d9a404f37a..46c0a17ec30 100644 --- a/osf/management/commands/add_notification_subscription.py +++ b/osf/management/commands/add_notification_subscription.py @@ -5,6 +5,7 @@ import logging import django + django.setup() from django.core.management.base import BaseCommand @@ -20,9 +21,9 @@ def add_reviews_notification_setting(notification_type, state=None): if state: OSFUser = state.get_model('osf', 'OSFUser') - NotificationSubscription = state.get_model('osf', 'NotificationSubscription') + NotificationSubscriptionLegacy = state.get_model('osf', 'NotificationSubscriptionLegacy') else: - from osf.models import OSFUser, NotificationSubscription + from osf.models import OSFUser, NotificationSubscriptionLegacy active_users = OSFUser.objects.filter(date_confirmed__isnull=False).exclude(date_disabled__isnull=False).exclude(is_active=False).order_by('id') total_active_users = active_users.count() @@ -33,10 +34,10 @@ def add_reviews_notification_setting(notification_type, state=None): for user in active_users.iterator(): user_subscription_id = to_subscription_key(user._id, notification_type) - subscription = NotificationSubscription.load(user_subscription_id) + subscription = NotificationSubscriptionLegacy.load(user_subscription_id) if not subscription: logger.info(f'No {notification_type} subscription found for user {user._id}. 
Subscribing...') - subscription = NotificationSubscription(_id=user_subscription_id, owner=user, event_name=notification_type) + subscription = NotificationSubscriptionLegacy(_id=user_subscription_id, owner=user, event_name=notification_type) subscription.save() # Need to save in order to access m2m fields subscription.add_user_to_subscription(user, 'email_transactional') else: diff --git a/osf/management/commands/data_storage_usage.py b/osf/management/commands/data_storage_usage.py index 00af3c7246f..2a5ac1bf19b 100644 --- a/osf/management/commands/data_storage_usage.py +++ b/osf/management/commands/data_storage_usage.py @@ -139,23 +139,6 @@ GROUP BY node.type, node.is_public """ -# Aggregation of non-deleted quick file sizes (NOTE: This will break when QuickFolders is merged) -ND_QUICK_FILE_SIZE_SUM_SQL = """ - SELECT - node.type, sum(size) - FROM osf_basefileversionsthrough AS obfnv - LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id - LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id - LEFT JOIN osf_abstractnode node ON file.target_object_id = node.id - WHERE file.provider = 'osfstorage' AND file.target_content_type_id = %s - AND node.type = 'osf.quickfilesnode' - AND node.is_deleted = False - AND file.deleted_on IS NULL - AND obfnv.id >= %s AND obfnv.id <= %s - GROUP BY node.type - - """ - # Aggregation of size of non-deleted files in preprint supplemental nodes based on the node query above ND_PREPRINT_SUPPLEMENT_SIZE_SUM_SQL = """ SELECT @@ -320,16 +303,6 @@ def gather_usage_data(start, end, dry_run, zip_file): cursor=cursor, )) - # TODO: Move the next when Quick Folders is done - logger.debug(f'Gathering quickfile summary at {datetime.datetime.now()}') - summary_data = combine_summary_data(summary_data, summarize( - sql=ND_QUICK_FILE_SIZE_SUM_SQL, - content_type=abstractnode_content_type, - start=start, - end=end, - cursor=cursor, - )) - logger.debug(f'Gathering supplement summary at {datetime.datetime.now()}') summary_data = combine_summary_data(summary_data, summarize( sql=ND_PREPRINT_SUPPLEMENT_SIZE_SUM_SQL, diff --git a/osf/management/commands/delete_legacy_quickfiles_nodes.py b/osf/management/commands/delete_legacy_quickfiles_nodes.py deleted file mode 100644 index ee69ffd37ab..00000000000 --- a/osf/management/commands/delete_legacy_quickfiles_nodes.py +++ /dev/null @@ -1,58 +0,0 @@ -import logging -from django.db import transaction -from django.utils import timezone -from django.core.management.base import BaseCommand -from framework import sentry -from framework.celery_tasks import app as celery_app - -from osf.models import QuickFilesNode, Node -logger = logging.getLogger(__name__) - - -@celery_app.task(name='osf.management.commands.delete_legacy_quickfiles_nodes') -def delete_quickfiles(batch_size=1000, dry_run=False): - """ - This is a periodic command to sunset our Quickfiles feature and can be safely deleted after - Quickfiles are all marked as deleted. - """ - with transaction.atomic(): - i = 0 - for i, node in enumerate(QuickFilesNode.objects.all()[:batch_size]): - node.is_deleted = True - node.deleted = timezone.now() - node.recast(Node._typedmodels_type) - node.save() - - logger.info(f'{i} Quickfiles deleted') - - if dry_run: - raise RuntimeError('dry run rolling back changes') - - if not QuickFilesNode.objects.exists(): - sentry.log_message('Clean-up complete, none more QuickFilesNode delete this task.') - - -class Command(BaseCommand): - """ - Deletes unused legacy Quickfiles. 
- """ - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - required=False, - ) - parser.add_argument( - '--batch_size', - type=int, - help='how many many Quickfiles are we deleting tonight?', - required=True, - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - batch_size = options.get('batch_size', 1000) - delete_quickfiles(batch_size, dry_run) diff --git a/osf/management/commands/export_user_account.py b/osf/management/commands/export_user_account.py index deb299c004a..027b421ec67 100644 --- a/osf/management/commands/export_user_account.py +++ b/osf/management/commands/export_user_account.py @@ -20,8 +20,7 @@ FileVersion, OSFUser, Preprint, - Registration, - QuickFilesNode + Registration ) from osf.utils.workflows import DefaultStates from scripts.utils import Progress @@ -146,7 +145,7 @@ def export_resource(node, user, current_dir): def export_resources(nodes_to_export, user, dir, nodes_type): """ Creates appropriate directory structure and exports a given set of resources - (projects, registrations, quickfiles or preprints) by calling export helper functions. + (projects, registrations or preprints) by calling export helper functions. """ progress = Progress() @@ -159,7 +158,7 @@ def export_resources(nodes_to_export, user, dir, nodes_type): progress.stop() def get_usage(user): - # includes nodes, registrations, quickfiles + # includes nodes, registrations nodes = user.nodes.filter(is_deleted=False).exclude(type='osf.collection').values_list('id', flat=True) node_ctype = ContentType.objects.get_for_model(AbstractNode) node_files = get_resource_files(nodes, node_ctype) @@ -214,12 +213,6 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f registrations/ *same as projects* - quickfiles/ - / - metadata.json - files/ - osfstorage-archive.zip - """ user = OSFUser.objects.get(guids___id=user_id, guids___id__isnull=False) proceed = input(f'\nUser has {get_usage(user):.2f} GB of data in OSFStorage that will be exported.\nWould you like to continue? 
[y/n] ') @@ -231,13 +224,11 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f preprints_dir = os.path.join(base_dir, 'preprints') projects_dir = os.path.join(base_dir, 'projects') registrations_dir = os.path.join(base_dir, 'registrations') - quickfiles_dir = os.path.join(base_dir, 'quickfiles') os.mkdir(base_dir) os.mkdir(preprints_dir) os.mkdir(projects_dir) os.mkdir(registrations_dir) - os.mkdir(quickfiles_dir) preprints_to_export = get_preprints_to_export(user) @@ -251,14 +242,9 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f .get_roots() ) - quickfiles_to_export = ( - QuickFilesNode.objects.filter(creator=user) - ) - export_resources(projects_to_export, user, projects_dir, 'projects') export_resources(preprints_to_export, user, preprints_dir, 'preprints') export_resources(registrations_to_export, user, registrations_dir, 'registrations') - export_resources(quickfiles_to_export, user, quickfiles_dir, 'quickfiles') timestamp = dt.datetime.fromtimestamp(time.time()).strftime('%Y%m%d%H%M%S') output = os.path.join(path, f'{user_id}-export-{timestamp}') diff --git a/osf/management/commands/fix_quickfiles_waterbutler_logs.py b/osf/management/commands/fix_quickfiles_waterbutler_logs.py deleted file mode 100644 index 904bfa82c84..00000000000 --- a/osf/management/commands/fix_quickfiles_waterbutler_logs.py +++ /dev/null @@ -1,140 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from django.db import transaction -from osf.models import Node, NodeLog -from framework.celery_tasks import app as celery_app -from urllib.parse import urljoin -from website import settings - -logger = logging.getLogger(__name__) - - -def swap_guid(url, node): - url = url.split('/')[:-1] - url[2] = node._id - url = '/'.join(url) - return f'{url}/?pid={node._id}' - - -def swap_guid_view_download(url, node): - url = url.split('/')[:-1] - url[1] = node._id - url = '/'.join(url) - url = url.partition('?pid=')[0] + f'/?pid={node._id}' - return url - - -error_causing_log_actions = { - 'addon_file_renamed', - 'addon_file_moved', - 'addon_file_copied', -} - -dead_links_actions = { - 'osf_storage_file_added', - 'file_tag_removed', - 'file_tag_added', - 'osf_storage_file_removed', - 'osf_storage_file_updated', -} - -affected_log_actions = error_causing_log_actions.union(dead_links_actions) - - -@celery_app.task(name='osf.management.commands.fix_quickfiles_waterbutler_logs') -def fix_logs(node_id, dry_run=False): - ''' - Fixes view/download links for waterbutler based file logs, and also fixes old 10 digit node params for moved/renamed - files. 
- ''' - logger.info(f'{node_id} Quickfiles logs started') - - with transaction.atomic(): - logger.debug(f'{node_id} Quickfiles logs started') - - node = Node.load(node_id) - for log in node.logs.filter(action__in=error_causing_log_actions): - log.params['params_node'] = { - '_id': node._id, - 'title': node.title - } - if log.params.get('auth'): - log.params['auth']['callback_url'] = urljoin( - settings.DOMAIN, - f'project/{node_id}/node/{node_id}/waterbutler/logs/' - ) - - url = swap_guid(log.params['source']['url'], node) - - if log.params['source']['resource'] == log.params['destination']['resource']: - log.params['source']['url'] = url - log.params['source']['nid'] = node._id - if log.params['source'].get('node'): - log.params['source']['node']['url'] = f'/{node._id}/' - log.params['source']['node']['_id'] = node._id - if log.params['source'].get('resource'): - log.params['source']['resource'] = node._id - - log.params['destination']['url'] = url - log.params['destination']['nid'] = node._id - - if log.params['destination'].get('node'): - log.params['destination']['node']['url'] = f'/{node._id}/' - log.params['destination']['node']['_id'] = node._id - - if log.params['destination'].get('resource'): - log.params['destination']['resource'] = node._id - - if log.params.get('urls'): - url = swap_guid_view_download(log.params['urls']['view'], node) - log.params['urls'] = { - 'view': url, - 'download': f'{url}&action=download' - } - - log.save() - - for log in node.logs.filter(action__in=dead_links_actions): - log.params['params_node'] = { - '_id': node._id, - 'title': node.title - } - - url = swap_guid_view_download(log.params['urls']['view'], node) - - log.params['urls'] = { - 'view': url, - 'download': f'{url}&action=download' - } - log.save() - - node.save() - if dry_run: - raise RuntimeError('This was a dry run.') - - logger.info(f'{node._id} Quickfiles logs fixed') - - -def fix_quickfiles_waterbutler_logs(dry_run=False): - nodes = Node.objects.filter(logs__action=NodeLog.MIGRATED_QUICK_FILES).values_list('guids___id', flat=True) - logger.info(f'{nodes.count()} Quickfiles nodes with bugged logs found.') - - for node_id in nodes: - logger.info(f'{node_id} Quickfiles logs fixing started') - fix_logs.apply_async(args=(node_id,), kwargs={'dry_run': dry_run}) - - -class Command(BaseCommand): - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Dry run', - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run') - fix_quickfiles_waterbutler_logs(dry_run=dry_run) diff --git a/osf/management/commands/metrics_backfill_summaries.py b/osf/management/commands/metrics_backfill_summaries.py index 0edd4e6810d..d259e9b2a52 100644 --- a/osf/management/commands/metrics_backfill_summaries.py +++ b/osf/management/commands/metrics_backfill_summaries.py @@ -78,22 +78,20 @@ def _map_download_count(row): def _map_file_summary(row): # date(keen.timestamp) => _source.report_date # "2022-12-30", # keen.created_at => _source.timestamp # "2023-01-02T14:59:04.397056+00:00" - # osfstorage_files_including_quickfiles.total => _source.files.total # 12272, - # osfstorage_files_including_quickfiles.public => _source.files.public # 126, - # osfstorage_files_including_quickfiles.private => _source.files.private # 12146, - # osfstorage_files_including_quickfiles.total_daily => _source.files.total_daily # 0, - # osfstorage_files_including_quickfiles.public_daily => _source.files.public_daily # 0, - # 
osfstorage_files_including_quickfiles.private_daily => _source.files.private_daily # 0 + # osfstorage_files.private => _source.files.private # 12146, + # osfstorage_files.total_daily => _source.files.total_daily # 0, + # osfstorage_files.public_daily => _source.files.public_daily # 0, + # osfstorage_files.private_daily => _source.files.private_daily # 0 return { 'report_date': _timestamp_to_date(row['keen.timestamp']), 'timestamp': _timestamp_to_dt(row['keen.created_at']), 'files': { - 'total': int(row['osfstorage_files_including_quickfiles.total']), - 'public': int(row['osfstorage_files_including_quickfiles.public']), - 'private': int(row['osfstorage_files_including_quickfiles.private']), - 'total_daily': int(row['osfstorage_files_including_quickfiles.total_daily']), - 'public_daily': int(row['osfstorage_files_including_quickfiles.public_daily']), - 'private_daily': int(row['osfstorage_files_including_quickfiles.private_daily']), + 'total': int(row['osfstorage_files.total']), + 'public': int(row['osfstorage_files.public']), + 'private': int(row['osfstorage_files.private']), + 'total_daily': int(row['osfstorage_files.total_daily']), + 'public_daily': int(row['osfstorage_files.public_daily']), + 'private_daily': int(row['osfstorage_files.private_daily']), }, } diff --git a/osf/management/commands/migrate_notifications.py b/osf/management/commands/migrate_notifications.py new file mode 100644 index 00000000000..8b7c1fe2a5e --- /dev/null +++ b/osf/management/commands/migrate_notifications.py @@ -0,0 +1,115 @@ +import yaml +from django.apps import apps +from website import settings + +import logging +from django.contrib.contenttypes.models import ContentType +from osf.models import NotificationType, NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy +from django.core.management.base import BaseCommand +from django.db import transaction + +logger = logging.getLogger(__name__) + +FREQ_MAP = { + 'none': 'none', + 'email_digest': 'weekly', + 'email_transactional': 'instantly', +} + +def migrate_legacy_notification_subscriptions(*args, **kwargs): + """ + Migrate legacy NotificationSubscription data to new notifications app. 
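+
+    For each NotificationSubscriptionLegacy row, the subscribed object is resolved as the
+    provider (for provider-based events), otherwise the node, otherwise the user, and a
+    NotificationSubscription is created or updated against the matching NotificationType.
+    Frequencies are intended to map per FREQ_MAP: email_transactional -> instantly,
+    email_digest -> weekly, otherwise none.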
+ """ + logger.info('Beginning legacy notification subscription migration...') + + PROVIDER_BASED_LEGACY_NOTIFICATION_TYPES = [ + 'new_pending_submissions', 'new_pending_withdraw_requests' + ] + + for legacy in NotificationSubscriptionLegacy.objects.all(): + event_name = legacy.event_name + if event_name in PROVIDER_BASED_LEGACY_NOTIFICATION_TYPES: + subscribed_object = legacy.provider + elif subscribed_object := legacy.node: + pass + elif subscribed_object := legacy.user: + pass + else: + raise NotImplementedError(f'Invalid Notification id {event_name}') + content_type = ContentType.objects.get_for_model(subscribed_object.__class__) + subscription, _ = NotificationSubscription.objects.update_or_create( + notification_type=NotificationType.objects.get(name=event_name), + user=legacy.user, + content_type=content_type, + object_id=subscribed_object.id, + defaults={ + 'user': legacy.user, + 'message_frequency': ( + ('weekly' if legacy.email_digest.exists() else 'none'), + 'instantly' if legacy.email_transactional.exists() else 'none' + ), + 'content_type': content_type, + 'object_id': subscribed_object.id, + } + ) + logger.info(f'Created NotificationType "{event_name}" with content_type {content_type}') + + +def update_notification_types(*args, **kwargs): + + with open(settings.NOTIFICATION_TYPES_YAML) as stream: + notification_types = yaml.safe_load(stream) + for notification_type in notification_types['notification_types']: + notification_type.pop('__docs__') + object_content_type_model_name = notification_type.pop('object_content_type_model_name') + notification_freq = notification_type.pop('notification_freq_default') + + if object_content_type_model_name == 'desk': + content_type = None + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + elif object_content_type_model_name == 'preprint': + Preprint = apps.get_model('osf', 'Preprint') + content_type = ContentType.objects.get_for_model(Preprint) + elif object_content_type_model_name == 'collectionsubmission': + CollectionSubmission = apps.get_model('osf', 'CollectionSubmission') + content_type = ContentType.objects.get_for_model(CollectionSubmission) + elif object_content_type_model_name == 'abstractprovider': + AbstractProvider = apps.get_model('osf', 'abstractprovider') + content_type = ContentType.objects.get_for_model(AbstractProvider) + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + else: + try: + content_type = ContentType.objects.get( + app_label='osf', + model=object_content_type_model_name + ) + except ContentType.DoesNotExist: + raise ValueError(f'No content type for osf.{object_content_type_model_name}') + + with open(notification_type['template']) as stream: + template = stream.read() + + notification_types['template'] = template + notification_types['notification_freq'] = notification_freq + nt, _ = NotificationType.objects.update_or_create( + name=notification_type['name'], + defaults=notification_type, + ) + nt.object_content_type = content_type + nt.save() + + +class Command(BaseCommand): + help = 'Migrate legacy NotificationSubscriptionLegacy objects to new Notification app models.' 
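+
+    # Usage sketch (assumes the standard Django manage.py entry point):
+    #   python3 manage.py migrate_notifications
+    # handle() below first syncs NotificationType rows from notifications.yaml via
+    # update_notification_types(), then copies legacy subscriptions via
+    # migrate_legacy_notification_subscriptions(), each inside its own transaction.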
+ + def handle(self, *args, **options): + with transaction.atomic(): + update_notification_types(args, options) + + with transaction.atomic(): + migrate_legacy_notification_subscriptions(args, options) diff --git a/osf/management/commands/populate_collection_provider_notification_subscriptions.py b/osf/management/commands/populate_collection_provider_notification_subscriptions.py index 5713b08061b..c3a21eb8d20 100644 --- a/osf/management/commands/populate_collection_provider_notification_subscriptions.py +++ b/osf/management/commands/populate_collection_provider_notification_subscriptions.py @@ -1,7 +1,7 @@ import logging from django.core.management.base import BaseCommand -from osf.models import NotificationSubscription, CollectionProvider +from osf.models import NotificationSubscriptionLegacy, CollectionProvider logger = logging.getLogger(__file__) @@ -12,7 +12,7 @@ def populate_collection_provider_notification_subscriptions(): provider_moderators = provider.get_group('moderator').user_set.all() for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscription.objects.get_or_create( + instance, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_{subscription}', event_name=subscription, provider=provider diff --git a/osf/management/commands/populate_registration_provider_notification_subscriptions.py b/osf/management/commands/populate_registration_provider_notification_subscriptions.py index fe372fcbb80..db4b44acba5 100644 --- a/osf/management/commands/populate_registration_provider_notification_subscriptions.py +++ b/osf/management/commands/populate_registration_provider_notification_subscriptions.py @@ -2,7 +2,7 @@ from django.contrib.auth.models import Group from django.core.management.base import BaseCommand -from osf.models import NotificationSubscription, RegistrationProvider +from osf.models import RegistrationProvider, NotificationSubscriptionLegacy logger = logging.getLogger(__file__) @@ -17,7 +17,7 @@ def populate_registration_provider_notification_subscriptions(): continue for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscription.objects.get_or_create( + instance, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_{subscription}', event_name=subscription, provider=provider diff --git a/osf/management/commands/reindex_quickfiles.py b/osf/management/commands/reindex_quickfiles.py deleted file mode 100644 index 84bdfa7d310..00000000000 --- a/osf/management/commands/reindex_quickfiles.py +++ /dev/null @@ -1,43 +0,0 @@ -from django.core.paginator import Paginator -from website.search.search import update_file -from osf.models import Node, NodeLog -from addons.osfstorage.models import OsfStorageFileNode -from django.core.management.base import BaseCommand -from tqdm import tqdm - -PAGE_SIZE = 100 - - -def paginated_progressbar(queryset, page_size, function): - paginator = Paginator(queryset, page_size) - progress_bar = tqdm(total=queryset.count()) - n_processed = 0 - for page_num in paginator.page_range: - page = paginator.page(page_num) - for item in page.object_list: - function(item) - n_processed += len(page.object_list) - progress_bar.update(n_processed) - progress_bar.close() - - -def reindex_quickfiles(): - nodes = Node.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES - ) - - file_ids = nodes.values_list('files__id', flat=True) - - files_to_reindex = OsfStorageFileNode.objects.filter(id__in=file_ids) - 
paginated_progressbar(files_to_reindex, PAGE_SIZE, update_file) - - for node in nodes: - node.update_search() - - -class Command(BaseCommand): - """ - Reindex all Quickfiles files that were moved during migration. h/t to erinspace who's code old I'm cribbing here. - """ - def handle(self, *args, **options): - reindex_quickfiles() diff --git a/osf/management/commands/transfer_quickfiles_to_projects.py b/osf/management/commands/transfer_quickfiles_to_projects.py deleted file mode 100644 index c4f5a8450f1..00000000000 --- a/osf/management/commands/transfer_quickfiles_to_projects.py +++ /dev/null @@ -1,180 +0,0 @@ -import pytz -import logging -import datetime - -from django.db import transaction -from django.db.models import Exists, F, Func, OuterRef, Value -from django.core.management.base import BaseCommand -from tqdm import tqdm - -from osf.models import ( - OSFUser, - QuickFilesNode, - NodeLog, - AbstractNode, - Guid, -) -from osf.models.base import generate_guid -from osf.models.quickfiles import get_quickfiles_project_title -from osf.models.queued_mail import QueuedMail -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField - -from addons.osfstorage.models import OsfStorageFile -from website import mails, settings -from django.contrib.contenttypes.models import ContentType - -logger = logging.getLogger(__name__) -QUICKFILES_DESC = 'The Quick Files feature was discontinued and it’s files were migrated into this Project on March' \ - ' 11, 2022. The file URL’s will still resolve properly, and the Quick Files logs are available in' \ - ' the Project’s Recent Activity.' -QUICKFILES_DATE = datetime.datetime(2022, 3, 11, tzinfo=pytz.utc) - - -def remove_quickfiles(): - node_content_type = ContentType.objects.get_for_model(AbstractNode) - quick_file_annotation = Exists( - OsfStorageFile.objects.filter( - target_object_id=OuterRef('id'), - target_content_type=node_content_type - ) - ) - quick_files_nodes = QuickFilesNode.objects.annotate(has_files=quick_file_annotation).filter(has_files=True) - target_count = quick_files_nodes.count() - logger.info(f'Acquired {target_count} targets') - - _ = Guid.objects.filter( - id__in=quick_files_nodes.values_list('guids__id', flat=True) - ).delete() - logger.info(f'Deleted guids: {_}') - - # generate unique guids prior to record creation to avoid collisions, set object ensures all guids are unique - guids = set() - while len(guids) < target_count: - guids.add(generate_guid()) - guids = list(guids) - logger.info(f'Generated {len(guids)} Guids') - - guids = [ - Guid( - _id=_id, - object_id=node_id, - content_type=node_content_type, - ) for _id, node_id in zip(guids, quick_files_nodes.values_list('id', flat=True)) - ] - Guid.objects.bulk_create(guids) - logger.info(f'Created {len(guids)} Guids') - - node_logs = [] - queued_mail = [] - pbar = tqdm(total=target_count) - for node in quick_files_nodes: - node_logs.append(NodeLog( - node=node, - user=node.creator, - original_node=node, - params={'node': node._id}, - action=NodeLog.MIGRATED_QUICK_FILES - )) - queued_mail.append(QueuedMail( - user=node.creator, - to_addr=node.creator.email, - send_at=QUICKFILES_DATE, - email_type=mails.QUICKFILES_MIGRATED.tpl_prefix, - data=dict( - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - quickfiles_link=node.absolute_url - ) - )) - node.logs.update( - params=Func( - F('params'), - Value(['node']), - Value(node._id, DateTimeAwareJSONField()), - function='jsonb_set' - ) - ) - pbar.update(1) - pbar.close() - - logger.info('Updated 
logs') - NodeLog.objects.bulk_create(node_logs) - logger.info(f'Created {len(node_logs)} logs') - QueuedMail.objects.bulk_create(queued_mail) - logger.info(f'Created {len(queued_mail)} mails') - - quick_files_nodes.update(description=QUICKFILES_DESC, type='osf.node') - logger.info(f'Projectified {target_count} QuickFilesNodes') - - -def reverse_remove_quickfiles(): - quickfiles_nodes_with_files = AbstractNode.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES - ) - for node in quickfiles_nodes_with_files: - node.guids.all().delete() - node.save() - - quickfiles_nodes_with_files.update( - type='osf.quickfilesnode', - is_deleted=False, - deleted=None, - ) - - users_without_nodes = OSFUser.objects.exclude( - id__in=QuickFilesNode.objects.all().values_list( - 'creator__id', - flat=True - ) - ) - quickfiles_created = [] - for user in users_without_nodes: - quickfiles_created.append( - QuickFilesNode( - title=get_quickfiles_project_title(user), - creator=user - ) - ) - - QuickFilesNode.objects.bulk_create(quickfiles_created) - - for quickfiles in quickfiles_created: - quickfiles.add_addon('osfstorage', auth=None, log=False) - quickfiles.save() - - NodeLog.objects.filter(action=NodeLog.MIGRATED_QUICK_FILES).delete() - - logger.info(f'{len(QuickFilesNode.objects.all())} quickfiles were restored.') - - -class Command(BaseCommand): - """ - Puts all Quickfiles into projects or reverses the effect. - """ - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - required=False, - ) - parser.add_argument( - '--reverse', - type=bool, - help='is the reverse to be run?.', - required=False, - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - reverse = options.get('reverse', False) - with transaction.atomic(): - if reverse: - reverse_remove_quickfiles() - else: - remove_quickfiles() - if dry_run: - raise RuntimeError('Dry run complete, rolling back.') diff --git a/osf/management/commands/update_storage_usage.py b/osf/management/commands/update_storage_usage.py index 09f087bd3e4..b6825de3343 100644 --- a/osf/management/commands/update_storage_usage.py +++ b/osf/management/commands/update_storage_usage.py @@ -20,7 +20,7 @@ def update_storage_usage(dry_run=False, days=DAYS): recently_modified = AbstractNode.objects.filter(modified__gt=modified_limit) for modified_node in recently_modified: file_op_occurred = modified_node.logs.filter(action__contains='file', created__gt=modified_limit).exists() - if not modified_node.is_quickfiles and file_op_occurred: + if file_op_occurred: update_storage_usage_cache(modified_node.id, modified_node._id) if dry_run: diff --git a/osf/metrics/reporters/osfstorage_file_count.py b/osf/metrics/reporters/osfstorage_file_count.py index 2f35e1e81fd..dd449da5f08 100644 --- a/osf/metrics/reporters/osfstorage_file_count.py +++ b/osf/metrics/reporters/osfstorage_file_count.py @@ -45,3 +45,9 @@ def report(self, date): ) return [report] + + def keen_events_from_report(self, report): + event = { + 'osfstorage_files': report.files.to_dict(), + } + return {'file_summary': [event]} diff --git a/osf/migrations/0001_initial.py b/osf/migrations/0001_initial.py index 75c7297114e..afa5a632f9c 100644 --- a/osf/migrations/0001_initial.py +++ b/osf/migrations/0001_initial.py @@ -116,7 +116,7 @@ class Migration(migrations.Migration): ('category', models.CharField(blank=True, choices=[('analysis', 'Analysis'), 
('communication', 'Communication'), ('data', 'Data'), ('hypothesis', 'Hypothesis'), ('instrumentation', 'Instrumentation'), ('methods and measures', 'Methods and Measures'), ('procedure', 'Procedure'), ('project', 'Project'), ('software', 'Software'), ('other', 'Other'), ('', 'Uncategorized')], default='', max_length=255)), ('registration_responses', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)), ('registration_responses_migrated', models.NullBooleanField(db_index=True, default=True)), - ('type', models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration'), ('osf.quickfilesnode', 'quick files node')], db_index=True, max_length=255)), + ('type', models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration')], db_index=True, max_length=255)), ('child_node_subscriptions', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)), ('deleted_date', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)), ('deleted', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)), @@ -2225,16 +2225,6 @@ class Migration(migrations.Migration): }, bases=('osf.abstractprovider',), ), - migrations.CreateModel( - name='QuickFilesNode', - fields=[ - ], - options={ - 'proxy': True, - 'indexes': [], - }, - bases=('osf.abstractnode',), - ), migrations.CreateModel( name='Registration', fields=[ diff --git a/osf/migrations/0003_aggregated_runsql_calls.py b/osf/migrations/0003_aggregated_runsql_calls.py index 985bed65e86..bf945b0f2dd 100644 --- a/osf/migrations/0003_aggregated_runsql_calls.py +++ b/osf/migrations/0003_aggregated_runsql_calls.py @@ -11,7 +11,6 @@ class Migration(migrations.Migration): migrations.RunSQL( [ """ - CREATE UNIQUE INDEX one_quickfiles_per_user ON public.osf_abstractnode USING btree (creator_id, type, is_deleted) WHERE (((type)::text = 'osf.quickfilesnode'::text) AND (is_deleted = false)); CREATE INDEX osf_abstractnode_collection_pub_del_type_index ON public.osf_abstractnode USING btree (is_public, is_deleted, type) WHERE ((is_public = true) AND (is_deleted = false) AND ((type)::text = 'osf.collection'::text)); CREATE INDEX osf_abstractnode_date_modified_ef1e2ad8 ON public.osf_abstractnode USING btree (last_logged); CREATE INDEX osf_abstractnode_node_pub_del_type_index ON public.osf_abstractnode USING btree (is_public, is_deleted, type) WHERE ((is_public = true) AND (is_deleted = false) AND ((type)::text = 'osf.node'::text)); diff --git a/osf/migrations/0016_auto_20230828_1810.py b/osf/migrations/0016_auto_20230828_1810.py index 50af929ea95..36f056c8ef1 100644 --- a/osf/migrations/0016_auto_20230828_1810.py +++ b/osf/migrations/0016_auto_20230828_1810.py @@ -23,6 +23,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='abstractnode', name='type', - field=models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.quickfilesnode', 'quick files node'), ('osf.registration', 'registration')], db_index=True, max_length=255), + field=models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration')], db_index=True, max_length=255), ), ] diff --git a/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py 
b/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py index e0b29a4ba9c..17294bd76a1 100644 --- a/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py +++ b/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py @@ -26,7 +26,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='abstractnode', name='type', - field=models.CharField(choices=[('osf.draftnode', 'draft node'), ('osf.node', 'node'), ('osf.quickfilesnode', 'quick files node'), ('osf.registration', 'registration')], db_index=True, max_length=255), + field=models.CharField(choices=[('osf.draftnode', 'draft node'), ('osf.node', 'node'), ('osf.registration', 'registration')], db_index=True, max_length=255), ), migrations.AlterField( model_name='abstractprovider', diff --git a/osf/migrations/0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py b/osf/migrations/0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py new file mode 100644 index 00000000000..255a735de5f --- /dev/null +++ b/osf/migrations/0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py @@ -0,0 +1,65 @@ +# Generated by Django 4.2.15 on 2025-04-25 12:54 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0030_abstractnode__manual_guid'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osfgroupgroupobjectpermission', + unique_together=None, + ), + migrations.RemoveField( + model_name='osfgroupgroupobjectpermission', + name='content_object', + ), + migrations.RemoveField( + model_name='osfgroupgroupobjectpermission', + name='group', + ), + migrations.RemoveField( + model_name='osfgroupgroupobjectpermission', + name='permission', + ), + migrations.RemoveField( + model_name='osfgrouplog', + name='group', + ), + migrations.RemoveField( + model_name='osfgrouplog', + name='user', + ), + migrations.AlterUniqueTogether( + name='osfgroupuserobjectpermission', + unique_together=None, + ), + migrations.RemoveField( + model_name='osfgroupuserobjectpermission', + name='content_object', + ), + migrations.RemoveField( + model_name='osfgroupuserobjectpermission', + name='permission', + ), + migrations.RemoveField( + model_name='osfgroupuserobjectpermission', + name='user', + ), + migrations.DeleteModel( + name='OSFGroup', + ), + migrations.DeleteModel( + name='OSFGroupGroupObjectPermission', + ), + migrations.DeleteModel( + name='OSFGroupLog', + ), + migrations.DeleteModel( + name='OSFGroupUserObjectPermission', + ), + ] diff --git a/osf/migrations/0032_alter_notificationsubscription_options_and_more.py b/osf/migrations/0032_alter_notificationsubscription_options_and_more.py new file mode 100644 index 00000000000..b4f273108d5 --- /dev/null +++ b/osf/migrations/0032_alter_notificationsubscription_options_and_more.py @@ -0,0 +1,132 @@ +# Generated by Django 4.2.13 on 2025-07-08 17:07 + +from django.conf import settings +import django.contrib.postgres.fields +from django.db import migrations, models +import django.db.models.deletion +import django_extensions.db.fields +import osf.models.base +import osf.models.notification_type + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('osf', '0031_alter_osfgroupgroupobjectpermission_unique_together_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='notificationsubscription', + options={'verbose_name': 'Notification 
Subscription', 'verbose_name_plural': 'Notification Subscriptions'}, + ), + migrations.AlterUniqueTogether( + name='notificationsubscription', + unique_together=set(), + ), + migrations.AddField( + model_name='notificationsubscription', + name='content_type', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype'), + ), + migrations.AddField( + model_name='notificationsubscription', + name='message_frequency', + field=models.CharField(max_length=500, null=True), + ), + migrations.AddField( + model_name='notificationsubscription', + name='object_id', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='notificationsubscription', + name='user', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to=settings.AUTH_USER_MODEL), + ), + migrations.CreateModel( + name='NotificationType', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('notification_interval_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=32), blank=True, default=osf.models.notification_type.get_default_frequency_choices, size=None)), + ('name', models.CharField(max_length=255, unique=True)), + ('template', models.TextField(help_text='Template used to render the event_info. Supports Django template syntax.')), + ('subject', models.TextField(blank=True, help_text='Template used to render the subject line of email. Supports Django template syntax.', null=True)), + ('object_content_type', models.ForeignKey(blank=True, help_text='Content type for subscribed objects. Null means global event.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='contenttypes.contenttype')), + ], + options={ + 'verbose_name': 'Notification Type', + 'verbose_name_plural': 'Notification Types', + }, + ), + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('event_context', models.JSONField()), + ('sent', models.DateTimeField(blank=True, null=True)), + ('seen', models.DateTimeField(blank=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True)), + ('subscription', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='osf.notificationsubscription')), + ], + options={ + 'verbose_name': 'Notification', + 'verbose_name_plural': 'Notifications', + }, + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='_id', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='email_digest', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='email_transactional', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='event_name', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='node', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='none', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='provider', + ), + migrations.AddField( + model_name='notificationsubscription', + name='notification_type', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='osf.notificationtype'), + ), + migrations.CreateModel( + name='NotificationSubscriptionLegacy', + fields=[ + ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('_id', models.CharField(db_index=True, max_length=100)), + ('event_name', models.CharField(max_length=100)), + ('email_digest', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), + ('email_transactional', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), + ('node', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.node')), + ('none', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), + ('provider', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.abstractprovider')), + ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'db_table': 'osf_notificationsubscription_legacy', + 'unique_together': {('_id', 'provider')}, + }, + bases=(models.Model, osf.models.base.QuerySetExplainMixin), + ), + ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 275fd148b6c..d09e350adfe 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -62,14 +62,16 @@ from .node_relation import NodeRelation from .nodelog import NodeLog from .notable_domain import NotableDomain, DomainReference -from .notifications import NotificationDigest, NotificationSubscription +from .notifications import NotificationDigest, NotificationSubscriptionLegacy +from .notification_subscription import NotificationSubscription +from .notification_type import NotificationType +from .notification import Notification + from .oauth import ( ApiOAuth2Application, ApiOAuth2PersonalToken, ApiOAuth2Scope, ) -from .osf_group import OSFGroup -from .osf_grouplog import OSFGroupLog from .outcome_artifacts import OutcomeArtifact from .outcomes import Outcome from .preprint import Preprint @@ -83,7 +85,6 @@ WhitelistedSHAREPreprintProvider, ) from .queued_mail import QueuedMail -from .quickfiles import QuickFilesNode from .registrations import ( DraftRegistration, DraftRegistrationLog, @@ -111,4 +112,3 @@ OSFUser, ) from .user_message import UserMessage - diff --git a/osf/models/collection_submission.py b/osf/models/collection_submission.py index 893533d85d1..56c5a64f659 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -132,10 +132,10 @@ def _notify_moderators_pending(self, event_data): 'allow_submissions': True, } - from .notifications import NotificationSubscription + from .notifications import NotificationSubscriptionLegacy from website.notifications.emails import store_emails - provider_subscription, created = NotificationSubscription.objects.get_or_create( + provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{self.collection.provider._id}_new_pending_submissions', provider=self.collection.provider ) diff --git a/osf/models/comment.py b/osf/models/comment.py index 586763956ee..032085cd0e9 100644 --- a/osf/models/comment.py +++ b/osf/models/comment.py @@ -13,7 +13,6 @@ from framework.exceptions import PermissionsError from website import settings from website.util 
import api_v2_url -from website.project import signals as project_signals from website.project.model import get_valid_mentioned_users_guids @@ -164,7 +163,6 @@ def create(cls, auth, **kwargs): comment.save() new_mentions = get_valid_mentioned_users_guids(comment, comment.node.contributors_and_group_members) if new_mentions: - project_signals.mention_added.send(comment, new_mentions=new_mentions, auth=auth) comment.ever_mentioned.add(*comment.node.contributors.filter(guids___id__in=new_mentions)) comment.save() @@ -177,8 +175,6 @@ def create(cls, auth, **kwargs): ) comment.node.save() - project_signals.comment_added.send(comment, auth=auth, new_mentions=new_mentions) - return comment def edit(self, content, auth, save=False): @@ -198,7 +194,6 @@ def edit(self, content, auth, save=False): if save: if new_mentions: - project_signals.mention_added.send(self, new_mentions=new_mentions, auth=auth) self.ever_mentioned.add(*self.node.contributors.filter(guids___id__in=new_mentions)) self.save() self.node.add_log( diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 7aaf27b5f89..1e2d033e713 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -2246,7 +2246,7 @@ def suspend_spam_user(self, user): # Make public nodes private from this contributor for node in user.all_nodes: - if self._id != node._id and len(node.contributors) == 1 and node.is_public and not node.is_quickfiles: + if self._id != node._id and len(node.contributors) == 1 and node.is_public: node.confirm_spam(save=True, train_spam_services=False) # Make preprints private from this contributor diff --git a/osf/models/node.py b/osf/models/node.py index 76d814ce97c..fb7a7f1e102 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -26,7 +26,7 @@ GroupObjectPermissionBase, UserObjectPermissionBase, ) -from guardian.shortcuts import get_objects_for_user, get_groups_with_perms +from guardian.shortcuts import get_objects_for_user from framework import status from framework.auth import oauth_scopes @@ -91,7 +91,7 @@ class AbstractNodeQuerySet(GuidMixinQuerySet): def get_roots(self): return self.filter( - id__in=self.exclude(type__in=['osf.collection', 'osf.quickfilesnode', 'osf.draftnode']).values_list( + id__in=self.exclude(type__in=['osf.collection', 'osf.draftnode']).values_list( 'root_id', flat=True)) def get_children(self, root, active=False, include_root=False): @@ -491,10 +491,6 @@ def is_registration(self): """For v1 compat.""" return False - @property - def is_quickfiles(self): - return False - @property def is_original(self): return not self.is_registration and not self.is_fork @@ -799,50 +795,6 @@ def can_edit(self, auth=None, user=None): is_api_node = False return (user and self.has_permission(user, WRITE)) or is_api_node - def add_osf_group(self, group, permission=WRITE, auth=None): - if auth and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Must be an admin to add an OSF Group.') - group.add_group_to_node(self, permission, auth) - - def update_osf_group(self, group, permission=WRITE, auth=None): - if auth and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Must be an admin to add an OSF Group.') - group.update_group_permissions_to_node(self, permission, auth) - - def remove_osf_group(self, group, auth=None): - if auth and not (self.has_permission(auth.user, ADMIN) or group.has_permission(auth.user, 'manage')): - raise PermissionsError('Must be an admin or an OSF Group manager to remove an OSF Group.') - group.remove_group_from_node(self, auth) - - @property - def 
osf_groups(self): - """Returns a queryset of OSF Groups whose members have some permission to the node - """ - from .osf_group import OSFGroupGroupObjectPermission, OSFGroup - - member_groups = get_groups_with_perms(self).filter(name__icontains='osfgroup') - return OSFGroup.objects.filter( - id__in=OSFGroupGroupObjectPermission.objects.filter(group_id__in=member_groups).values_list( - 'content_object_id')) - - def get_osf_groups_with_perms(self, permission): - """Returns a queryset of OSF Groups whose members have the specified permission to the node - """ - from .osf_group import OSFGroup - from .node import NodeGroupObjectPermission - try: - perm_id = Permission.objects.get(codename=permission + '_node').id - except Permission.DoesNotExist: - raise ValueError('Specified permission does not exist.') - member_groups = NodeGroupObjectPermission.objects.filter( - permission_id=perm_id, content_object_id=self.id - ).filter( - group__name__icontains='osfgroup' - ).values_list( - 'group_id', flat=True - ) - return OSFGroup.objects.filter(osfgroupgroupobjectpermission__group_id__in=member_groups) - def get_logs_queryset(self, auth): return NodeLog.objects.filter( node_id=self.id, @@ -2165,10 +2117,10 @@ def update(self, fields, auth=None, save=True): if not hasattr(self, 'is_bookmark_collection'): self.set_title(title=value, auth=auth, save=False) continue - if not self.is_bookmark_collection or not self.is_quickfiles: + if not self.is_bookmark_collection: self.set_title(title=value, auth=auth, save=False) else: - raise NodeUpdateError(reason='Bookmark collections or QuickFilesNodes cannot be renamed.', key=key) + raise NodeUpdateError(reason='Bookmark collections cannot be renamed.', key=key) elif key == 'description': self.set_description(description=value, auth=auth, save=False) elif key == 'category': @@ -2621,7 +2573,6 @@ def add_default_node_addons(sender, instance, created, **kwargs): @receiver(post_save, sender=Node) @receiver(post_save, sender='osf.Registration') -@receiver(post_save, sender='osf.QuickFilesNode') @receiver(post_save, sender='osf.DraftNode') def set_parent_and_root(sender, instance, created, *args, **kwargs): if getattr(instance, '_parent', None): diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py index d6e01f4822b..a9f0bf63103 100644 --- a/osf/models/nodelog.py +++ b/osf/models/nodelog.py @@ -141,8 +141,6 @@ class NodeLog(ObjectIDMixin, BaseModel): FLAG_SPAM = 'flag_spam' CONFIRM_SPAM = 'confirm_spam' - MIGRATED_QUICK_FILES = 'migrated_quickfiles' - RESOURCE_ADDED = 'resource_identifier_added' RESOURCE_UPDATED = 'resource_identifier_udpated' RESOURCE_REMOVED = 'resource_identifier_removed' diff --git a/osf/models/notification.py b/osf/models/notification.py new file mode 100644 index 00000000000..14fc4fd3155 --- /dev/null +++ b/osf/models/notification.py @@ -0,0 +1,65 @@ +import logging + +from django.db import models +from website import settings +from api.base import settings as api_settings +from osf import email + +class Notification(models.Model): + subscription = models.ForeignKey( + 'NotificationSubscription', + on_delete=models.CASCADE, + related_name='notifications' + ) + event_context: dict = models.JSONField() + sent = models.DateTimeField(null=True, blank=True) + seen = models.DateTimeField(null=True, blank=True) + created = models.DateTimeField(auto_now_add=True) + + def send(self, protocol_type='email', recipient=None): + if not protocol_type == 'email': + raise NotImplementedError(f'Protocol type {protocol_type}. 
Only email notifications are implemented.') + + recipient_address = getattr(recipient, 'username', None) or self.subscription.user + + if protocol_type == 'email' and settings.DEV_MODE and settings.ENABLE_TEST_EMAIL: + email.send_email_over_smtp( + recipient_address, + self.subscription.notification_type, + self.event_context + ) + elif protocol_type == 'email' and settings.DEV_MODE: + if not api_settings.CI_ENV: + logging.info( + f"Email not sent: DEV_MODE is enabled and ENABLE_TEST_EMAIL is disabled, so the notification is only logged:" + f"\nto={recipient_address}" + f"\ntype={self.subscription.notification_type.name}" + f"\ncontext={self.event_context}" + ) + elif protocol_type == 'email': + email.send_email_with_send_grid( + recipient_address, + self.subscription.notification_type, + self.event_context + ) + else: + raise NotImplementedError(f'protocol `{protocol_type}` is not supported.') + + self.mark_sent() + + def mark_sent(self) -> None: + raise NotImplementedError('mark_sent must be implemented by subclasses.') + # self.sent = timezone.now() + # self.save(update_fields=['sent']) + + def mark_seen(self) -> None: + raise NotImplementedError('mark_seen must be implemented by subclasses.') + # self.seen = timezone.now() + # self.save(update_fields=['seen']) + + def __str__(self) -> str: + return f'Notification for {self.subscription.user} [{self.subscription.notification_type.name}]' + + class Meta: + verbose_name = 'Notification' + verbose_name_plural = 'Notifications' diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py new file mode 100644 index 00000000000..a1c9467b50e --- /dev/null +++ b/osf/models/notification_subscription.py @@ -0,0 +1,101 @@ +from django.db import models +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from osf.models.notification_type import get_default_frequency_choices +from osf.models.notification import Notification + +from .base import BaseModel + + +class NotificationSubscription(BaseModel): + notification_type = models.ForeignKey( + 'NotificationType', + on_delete=models.CASCADE, + null=True + ) + user = models.ForeignKey( + 'osf.OSFUser', + null=True, + on_delete=models.CASCADE, + related_name='subscriptions' + ) + message_frequency: str = models.CharField( + max_length=500, + null=True + ) + + content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE) + object_id = models.CharField(max_length=255, null=True, blank=True) + subscribed_object = GenericForeignKey('content_type', 'object_id') + + def clean(self): + ct = self.notification_type.object_content_type + + if ct: + if self.content_type != ct: + raise ValidationError('Subscribed object must match type\'s content_type.') + if not self.object_id: + raise ValidationError('Subscribed object ID is required.') + else: + if self.content_type or self.object_id: + raise ValidationError('Global subscriptions must not have an object.') + + allowed_freqs = self.notification_type.notification_interval_choices or get_default_frequency_choices() + if self.message_frequency not in allowed_freqs: + raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') + + def __str__(self) -> str: + return f'<{self.user} via {self.subscribed_object} subscribes to {self.notification_type.name} ({self.message_frequency})>' + + class Meta: + verbose_name =
'Notification Subscription' + verbose_name_plural = 'Notification Subscriptions' + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification for this subscription by creating a Notification object (sent immediately when the subscription's message_frequency is 'instantly'). + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. + """ + if self.message_frequency == 'instantly': + Notification.objects.create( + subscription=self, + event_context=event_context + ).send() + else: + Notification.objects.create( + subscription=self, + event_context=event_context + ) + + @property + def absolute_api_v2_url(self): + from api.base.utils import absolute_reverse + return absolute_reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) + + @property + def _id(self): + """ + Legacy subscription id for API compatibility. + Provider: <provider_id>_new_pending_submissions + User/global: <user_id>_global + Node/etc: <guid>_<event> + """ + # Safety checks + event = self.notification_type.name + ct = self.notification_type.object_content_type + match getattr(ct, 'model', None): + case 'preprintprovider' | 'collectionprovider' | 'registrationprovider': + # Providers: use subscribed_object._id (which is the provider short name, e.g. 'mindrxiv') + return f'{self.subscribed_object._id}_new_pending_submissions' + case 'node' | 'collection' | 'preprint': + # Node-like objects: use object_id (guid) + return f'{self.subscribed_object._id}_{event}' + case 'osfuser' | 'user' | None: + # Global: <user_id>_global + return f'{self.user._id}_global' + case _: + raise NotImplementedError() diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py new file mode 100644 index 00000000000..9b36d20e93a --- /dev/null +++ b/osf/models/notification_type.py @@ -0,0 +1,250 @@ +from django.db import models +from django.contrib.postgres.fields import ArrayField +from django.contrib.contenttypes.models import ContentType + +from osf.models.notification import Notification +from enum import Enum + + +class FrequencyChoices(Enum): + NONE = 'none' + INSTANTLY = 'instantly' + DAILY = 'daily' + WEEKLY = 'weekly' + MONTHLY = 'monthly' + + @classmethod + def choices(cls): + return [(key.value, key.name.capitalize()) for key in cls] + +def get_default_frequency_choices(): + DEFAULT_FREQUENCY_CHOICES = ['none', 'instantly', 'daily', 'weekly', 'monthly'] + return DEFAULT_FREQUENCY_CHOICES.copy() + + +class NotificationType(models.Model): + + class Type(str, Enum): + # Desk notifications + DESK_REQUEST_EXPORT = 'desk_request_export' + DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation' + DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email' + DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = 'desk_registration_bulk_upload_product_owner' + DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE = 'desk_user_registration_bulk_upload_unexpected_failure' + DESK_ARCHIVE_JOB_EXCEEDED = 'desk_archive_job_exceeded' + DESK_ARCHIVE_JOB_COPY_ERROR = 'desk_archive_job_copy_error' + DESK_ARCHIVE_JOB_FILE_NOT_FOUND = 'desk_archive_job_file_not_found' + DESK_ARCHIVE_JOB_UNCAUGHT_ERROR = 'desk_archive_job_uncaught_error' + + # User notifications + USER_PENDING_VERIFICATION = 'user_pending_verification' + USER_PENDING_VERIFICATION_REGISTERED = 'user_pending_verification_registered' + USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT = 'user_storage_cap_exceeded_announcement' + USER_SPAM_BANNED = 'user_spam_banned' +
USER_REQUEST_DEACTIVATION_COMPLETE = 'user_request_deactivation_complete' + USER_PRIMARY_EMAIL_CHANGED = 'user_primary_email_changed' + USER_INSTITUTION_DEACTIVATION = 'user_institution_deactivation' + USER_FORGOT_PASSWORD = 'user_forgot_password' + USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' + USER_REQUEST_EXPORT = 'user_request_export' + USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' + USER_CONTRIBUTOR_ADDED_DEFAULT = 'user_contributor_added_default' + USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' + USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' + USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL = 'user_registration_bulk_upload_success_partial' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL = 'user_registration_bulk_upload_success_all' + USER_ADD_SSO_EMAIL_OSF4I = 'user_add_sso_email_osf4i' + USER_WELCOME_OSF4I = 'user_welcome_osf4i' + USER_ARCHIVE_JOB_EXCEEDED = 'user_archive_job_exceeded' + USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' + USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' + USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' + USER_COMMENT_REPLIES = 'user_comment_replies' + USER_COMMENTS = 'user_comments' + USER_FILE_UPDATED = 'user_file_updated' + USER_COMMENT_MENTIONS = 'user_mentions' + USER_REVIEWS = 'user_reviews' + USER_PASSWORD_RESET = 'user_password_reset' + USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_confirm_email_link' + USER_CONFIRM_MERGE = 'user_confirm_merge' + USER_CONFIRM_EMAIL = 'user_confirm_email' + USER_INITIAL_CONFIRM_EMAIL = 'user_initial_confirm_email' + USER_INVITE_DEFAULT = 'user_invite_default' + USER_PENDING_INVITE = 'user_pending_invite' + USER_FORWARD_INVITE = 'user_forward_invite' + USER_FORWARD_INVITE_REGISTERED = 'user_forward_invite_registered' + USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' + USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' + + # Node notifications + NODE_COMMENT = 'node_comments' + NODE_FILES_UPDATED = 'node_files_updated' + NODE_AFFILIATION_CHANGED = 'node_affiliation_changed' + NODE_REQUEST_ACCESS_SUBMITTED = 'node_access_request_submitted' + NODE_REQUEST_ACCESS_DENIED = 'node_request_access_denied' + NODE_FORK_COMPLETED = 'node_fork_completed' + NODE_FORK_FAILED = 'node_fork_failed' + NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' + NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' + NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' + NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' + NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' + NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' + NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' + NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' + NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' + NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' + + # Provider notifications + PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' + PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 
'provider_reviews_submission_confirmation' + PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' + PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' + PROVIDER_REVIEWS_REJECT_CONFIRMATION = 'provider_reviews_reject_confirmation' + PROVIDER_REVIEWS_ACCEPT_CONFIRMATION = 'provider_reviews_accept_confirmation' + PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION = 'provider_reviews_resubmission_confirmation' + PROVIDER_REVIEWS_COMMENT_EDITED = 'provider_reviews_comment_edited' + PROVIDER_CONTRIBUTOR_ADDED_PREPRINT = 'provider_contributor_added_preprint' + PROVIDER_CONFIRM_EMAIL_MODERATION = 'provider_confirm_email_moderation' + PROVIDER_MODERATOR_ADDED = 'provider_moderator_added' + PROVIDER_CONFIRM_EMAIL_PREPRINTS = 'provider_confirm_email_preprints' + PROVIDER_USER_INVITE_PREPRINT = 'provider_user_invite_preprint' + + # Preprint notifications + PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' + PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 'preprint_request_withdrawal_declined' + PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' + + # Collections Submission notifications + COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' + COLLECTION_SUBMISSION_REMOVED_MODERATOR = 'collection_submission_removed_moderator' + COLLECTION_SUBMISSION_REMOVED_PRIVATE = 'collection_submission_removed_private' + COLLECTION_SUBMISSION_SUBMITTED = 'collection_submission_submitted' + COLLECTION_SUBMISSION_ACCEPTED = 'collection_submission_accepted' + COLLECTION_SUBMISSION_REJECTED = 'collection_submission_rejected' + COLLECTION_SUBMISSION_CANCEL = 'collection_submission_cancel' + + # Schema Response notifications + SCHEMA_RESPONSE_REJECTED = 'schema_response_rejected' + SCHEMA_RESPONSE_APPROVED = 'schema_response_approved' + SCHEMA_RESPONSE_SUBMITTED = 'schema_response_submitted' + SCHEMA_RESPONSE_INITIATED = 'schema_response_initiated' + + REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' + + @property + def instance(self): + obj, created = NotificationType.objects.get_or_create(name=self.value) + return obj + + @classmethod + def user_types(cls): + return [member for member in cls if member.name.startswith('USER_')] + + @classmethod + def node_types(cls): + return [member for member in cls if member.name.startswith('NODE_')] + + @classmethod + def preprint_types(cls): + return [member for member in cls if member.name.startswith('PREPRINT_')] + + @classmethod + def provider_types(cls): + return [member for member in cls if member.name.startswith('PROVIDER_')] + + @classmethod + def schema_response_types(cls): + return [member for member in cls if member.name.startswith('SCHEMA_RESPONSE_')] + + @classmethod + def desk_types(cls): + return [member for member in cls if member.name.startswith('DESK_')] + + notification_interval_choices = ArrayField( + base_field=models.CharField(max_length=32), + default=get_default_frequency_choices, + blank=True + ) + + name: str = models.CharField(max_length=255, unique=True, null=False, blank=False) + + object_content_type = models.ForeignKey( + ContentType, + on_delete=models.SET_NULL, + null=True, + blank=True, + help_text='Content type for subscribed objects. Null means global event.' + ) + + template: str = models.TextField( + help_text='Template used to render the event_info. Supports Django template syntax.' 
+ ) + subject: str = models.TextField( + blank=True, + null=True, + help_text='Template used to render the subject line of email. Supports Django template syntax.' + ) + + def emit(self, user, subscribed_object=None, message_frequency=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. + """ + from osf.models.notification_subscription import NotificationSubscription + subscription, created = NotificationSubscription.objects.get_or_create( + notification_type=self, + user=user, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + defaults={'message_frequency': message_frequency}, + ) + if subscription.message_frequency == 'instantly': + Notification.objects.create( + subscription=subscription, + event_context=event_context + ).send() + + def add_user_to_subscription(self, user, *args, **kwargs): + """ + """ + from osf.models.notification_subscription import NotificationSubscription + + provider = kwargs.pop('provider', None) + node = kwargs.pop('node', None) + data = {} + if subscribed_object := provider or node: + data = { + 'object_id': subscribed_object.id, + 'content_type_id': ContentType.objects.get_for_model(subscribed_object).id, + } + + notification, created = NotificationSubscription.objects.get_or_create( + user=user, + notification_type=self, + **data, + ) + return notification + + def remove_user_from_subscription(self, user): + """ + """ + from osf.models.notification_subscription import NotificationSubscription + notification, _ = NotificationSubscription.objects.update_or_create( + user=user, + notification_type=self, + defaults={'message_frequency': FrequencyChoices.NONE.value} + ) + + def __str__(self) -> str: + return self.name + + class Meta: + verbose_name = 'Notification Type' + verbose_name_plural = 'Notification Types' diff --git a/osf/models/notifications.py b/osf/models/notifications.py index 86be3424832..41ec120b4ee 100644 --- a/osf/models/notifications.py +++ b/osf/models/notifications.py @@ -1,15 +1,16 @@ from django.contrib.postgres.fields import ArrayField from django.db import models + +from website.notifications.constants import NOTIFICATION_TYPES from .node import Node from .user import OSFUser from .base import BaseModel, ObjectIDMixin from .validators import validate_subscription_type from osf.utils.fields import NonNaiveDateTimeField -from website.notifications.constants import NOTIFICATION_TYPES from website.util import api_v2_url -class NotificationSubscription(BaseModel): +class NotificationSubscriptionLegacy(BaseModel): primary_identifier_name = '_id' _id = models.CharField(max_length=100, db_index=True, unique=False) # pxyz_wiki_updated, uabc_comment_replies @@ -29,6 +30,7 @@ class NotificationSubscription(BaseModel): class Meta: # Both PreprintProvider and RegistrationProvider default instances use "osf" as their `_id` unique_together = ('_id', 'provider') + db_table = 'osf_notificationsubscription_legacy' @classmethod def load(cls, q): @@ -95,7 +97,6 @@ def remove_user_from_subscription(self, user, save=True): if save: self.save() - class NotificationDigest(ObjectIDMixin, BaseModel): user = models.ForeignKey('OSFUser', null=True, blank=True, 
on_delete=models.CASCADE) provider = models.ForeignKey('AbstractProvider', null=True, blank=True, on_delete=models.CASCADE) diff --git a/osf/models/osf_group.py b/osf/models/osf_group.py deleted file mode 100644 index a9a6b3b6f56..00000000000 --- a/osf/models/osf_group.py +++ /dev/null @@ -1,576 +0,0 @@ -import logging -import functools -from django.apps import apps -from django.core.exceptions import ValidationError -from django.db import models -from django.db.models.signals import post_save -from django.dispatch import receiver -from guardian.shortcuts import assign_perm, remove_perm, get_perms, get_objects_for_group, get_group_perms -from guardian.models import GroupObjectPermissionBase, UserObjectPermissionBase - -from framework.exceptions import PermissionsError -from framework.auth.core import get_user, Auth -from framework.sentry import log_exception -from osf.exceptions import BlockedEmailError -from .base import BaseModel, ObjectIDMixin -from .mixins import GuardianMixin, Loggable -from .node import Node -from .nodelog import NodeLog -from .user import OSFUser -from .osf_grouplog import OSFGroupLog -from .validators import validate_email -from osf.utils.permissions import ADMIN, READ_NODE, WRITE, MANAGER, MEMBER, MANAGE, reduce_permissions -from osf.utils import sanitize -from website.project import signals as project_signals -from website.osf_groups import signals as group_signals -from website.util import api_v2_url - -logger = logging.getLogger(__name__) - - -class OSFGroup(GuardianMixin, Loggable, ObjectIDMixin, BaseModel): - """ - OSFGroup model. When an OSFGroup is created, a manager and member Django group are created. - Managers belong to both manager and member groups. Members belong to the member group only. - - The OSFGroup's Django member group is given permissions to nodes, so all OSFGroup members - get the same permission to the node. 
- """ - - name = models.TextField(blank=False) - creator = models.ForeignKey(OSFUser, - db_index=True, - related_name='osfgroups_created', - on_delete=models.SET_NULL, - null=True, blank=True) - - groups = { - 'member': ('member_group',), - 'manager': ('manage_group',), - } - group_format = 'osfgroup_{self.id}_{group}' - - def __unicode__(self): - return f'OSFGroup_{self.id}_{self.name}' - - class Meta: - permissions = ( - ('view_group', 'Can view group details'), - ('member_group', 'Has group membership'), - ('manage_group', 'Can manage group membership'), - ) - - @property - def _primary_key(self): - return self._id - - @property - def manager_group(self): - """ - OSFGroup's Django manager group object - """ - return self.get_group(MANAGER) - - @property - def member_group(self): - """ - OSFGroup's Django member group object - """ - return self.get_group(MEMBER) - - @property - def managers(self): - # All users that belong to the OSF Group's manager group - return self.manager_group.user_set.all() - - @property - def members(self): - # All members/managers belonging to this OSFGroup - - # the member group has both members and managers - return self.member_group.user_set.all() - - @property - def members_only(self): - # Users that are truly members-only and not managers - return self.members.exclude(id__in=self.managers) - - @property - def nodes(self): - """ - Returns nodes that the OSF group has permission to - """ - return get_objects_for_group(self.member_group, READ_NODE, Node) - - @property - def absolute_api_v2_url(self): - path = f'/groups/{self._id}/' - return api_v2_url(path) - - @property - def url(self): - # TODO - front end hasn't been set up - return f'/{self._primary_key}/' - - def get_absolute_url(self): - return self.absolute_api_v2_url - - def is_member(self, user): - # Checking group membership instead of permissions, because unregistered - # members have no perms - return user in self.members - - def is_manager(self, user): - # Checking group membership instead of permissions, because unregistered - # members have no perms - return user in self.managers - - def _require_manager_permission(self, auth=None): - if auth and not self.has_permission(auth.user, MANAGE): - raise PermissionsError('Must be a group manager to modify group membership.') - - def _disabled_user_check(self, user): - if user.is_disabled: - raise ValueError('Deactivated users cannot be added to OSF Groups.') - - def _enforce_one_manager(self, user): - # Group must have at least one registered manager - if (len(self.managers) == 1 and self.managers[0] == user) or not self.managers.filter(is_registered=True).exclude(id=user.id): - raise ValueError('Group must have at least one manager.') - - def _get_node_group_perms(self, node, permission): - """ - Gets expanded permissions for a node. The expanded permissions can be used - to add to the member group. - - Raises error if permission is invalid. 
- """ - permissions = node.groups.get(permission) - if not permissions: - raise ValueError(f'{permission} is not a valid permission.') - return permissions - - def send_member_email(self, user, permission, auth=None): - group_signals.member_added.send(self, user=user, permission=permission, auth=auth) - - def make_member(self, user, auth=None): - """Add member or downgrade manager to member - - :param user: OSFUser object, intended member - :param auth: Auth object - """ - self._require_manager_permission(auth) - self._disabled_user_check(user) - adding_member = not self.is_member(user) - if user in self.members_only: - return False - - self.member_group.user_set.add(user) - if self.is_manager(user): - self._enforce_one_manager(user) - self.manager_group.user_set.remove(user) - self.add_role_updated_log(user, MEMBER, auth) - else: - self.add_log( - OSFGroupLog.MEMBER_ADDED, - params={ - 'group': self._id, - 'user': user._id, - }, - auth=auth) - self.update_search() - - if adding_member: - self.send_member_email(user, MEMBER, auth) - - def make_manager(self, user, auth=None): - """Add manager or upgrade member to manager - - :param user: OSFUser object, intended manager - :param auth: Auth object - """ - self._require_manager_permission(auth) - self._disabled_user_check(user) - adding_member = not self.is_member(user) - if self.is_manager(user): - return False - - if not self.is_member(user): - self.add_log( - OSFGroupLog.MANAGER_ADDED, - params={ - 'group': self._id, - 'user': user._id, - }, - auth=auth) - - else: - self.add_role_updated_log(user, MANAGER, auth) - self.manager_group.user_set.add(user) - self.member_group.user_set.add(user) - self.update_search() - - if adding_member: - self.send_member_email(user, MANAGER, auth) - - def add_unregistered_member(self, fullname, email, auth, role=MEMBER): - """Add unregistered member or manager to OSFGroup - - :param fullname: string, user fullname - :param email: email, user email - :param auth: Auth object - :param role: string, "member" or "manager", default is member - """ - OSFUser = apps.get_model('osf.OSFUser') - - try: - validate_email(email) - except BlockedEmailError: - raise ValidationError('Email address domain is blocked.') - - user = get_user(email=email) - if user: - if user.is_registered or self.is_member(user): - raise ValueError('User already exists.') - else: - user = OSFUser.create_unregistered(fullname=fullname, email=email) - user.add_unclaimed_record( - self, - referrer=auth.user, - given_name=fullname, - email=email, - ) - - if role == MANAGER: - self.make_manager(user, auth=auth) - else: - self.make_member(user, auth=auth) - - return user - - def replace_contributor(self, old, new): - """ - Replacing unregistered member with a verified user - - Using "replace_contributor" language to mimic Node model, so this can be called in - the same views using to claim accounts on nodes. - """ - if not self.is_member(old): - return False - - # Remove unclaimed record for the group - if self._id in old.unclaimed_records: - del old.unclaimed_records[self._id] - old.save() - - # For the manager and member Django group attached to the OSFGroup, - # add the new user to the group, and remove the old. 
This - # will give the new user the appropriate permissions to the OSFGroup - for group_name in self.groups.keys(): - if self.get_group(group_name).user_set.filter(id=old.id).exists(): - self.get_group(group_name).user_set.remove(old) - self.get_group(group_name).user_set.add(new) - - self.update_search() - return True - - def remove_member(self, user, auth=None): - """Remove member or manager - - :param user: OSFUser object, member/manager to remove - :param auth: Auth object - """ - if not (auth and user == auth.user): - self._require_manager_permission(auth) - - if not self.is_member(user): - return False - self._enforce_one_manager(user) - self.manager_group.user_set.remove(user) - self.member_group.user_set.remove(user) - - self.add_log( - OSFGroupLog.MEMBER_REMOVED, - params={ - 'group': self._id, - 'user': user._id, - }, - auth=auth) - - self.update_search() - - for node in self.nodes: - project_signals.contributor_removed.send(node, user=user) - node.disconnect_addons(user, auth) - - def set_group_name(self, name, auth=None): - """Set the name of the group. - - :param str new Name: The new osf group name - :param auth: Auth object - """ - self._require_manager_permission(auth) - new_name = sanitize.strip_html(name) - # Title hasn't changed after sanitzation, bail out - if self.name == new_name: - return False - old_name = self.name - self.name = new_name - - self.add_log( - OSFGroupLog.EDITED_NAME, - params={ - 'group': self._id, - 'name_original': old_name - }, - auth=auth) - self.update_search() - for node in self.nodes: - node.update_search() - - def add_group_to_node(self, node, permission=WRITE, auth=None): - """Gives the OSF Group permissions to the node. Called from node model. - - :param obj Node - :param str Highest permission to grant, 'read', 'write', or 'admin' - :param auth: Auth object - """ - self._require_manager_permission(auth) - - current_perm = self.get_permission_to_node(node) - if current_perm: - if current_perm == permission: - return False - # If group already has perms to node, update permissions instead - return self.update_group_permissions_to_node(node, permission, auth) - - permissions = self._get_node_group_perms(node, permission) - for perm in permissions: - assign_perm(perm, self.member_group, node) - - params = { - 'group': self._id, - 'node': node._id, - 'permission': permission - } - - self.add_log( - OSFGroupLog.NODE_CONNECTED, - params=params, - auth=auth) - - self.add_corresponding_node_log(node, NodeLog.GROUP_ADDED, params, auth) - node.update_search() - - for user in self.members: - group_signals.group_added_to_node.send(self, node=node, user=user, permission=permission, auth=auth) - - def update_group_permissions_to_node(self, node, permission=WRITE, auth=None): - """Updates the OSF Group permissions to the node. Called from node model. 
- - :param obj Node - :param str Highest permission to grant, 'read', 'write', or 'admin' - :param auth: Auth object - """ - if self.get_permission_to_node(node) == permission: - return False - permissions = self._get_node_group_perms(node, permission) - to_remove = set(get_perms(self.member_group, node)).difference(permissions) - for perm in to_remove: - remove_perm(perm, self.member_group, node) - for perm in permissions: - assign_perm(perm, self.member_group, node) - params = { - 'group': self._id, - 'node': node._id, - 'permission': permission - } - self.add_log( - OSFGroupLog.NODE_PERMS_UPDATED, - params=params, - auth=auth - ) - - self.add_corresponding_node_log(node, NodeLog.GROUP_UPDATED, params, auth) - - def remove_group_from_node(self, node, auth): - """Removes the OSFGroup from the node. Called from node model. - - :param obj Node - """ - if not self.get_permission_to_node(node): - return False - for perm in node.groups[ADMIN]: - remove_perm(perm, self.member_group, node) - params = { - 'group': self._id, - 'node': node._id, - } - self.add_log( - OSFGroupLog.NODE_DISCONNECTED, - params=params, - auth=auth) - - self.add_corresponding_node_log(node, NodeLog.GROUP_REMOVED, params, auth) - node.update_search() - - for user in self.members: - node.disconnect_addons(user, auth) - project_signals.contributor_removed.send(node, user=user) - - def get_permission_to_node(self, node): - """ - Returns the permission this OSF group has to the given node - - :param node: Node object - """ - perms = get_group_perms(self.member_group, node) - return reduce_permissions(perms) if perms else None - - def has_permission(self, user, permission): - """Returns whether the user has the given permission to the OSFGroup - :param user: Auth object - :param role: member/manange permission - :return Boolean - """ - if not user or user.is_anonymous: - return False - - # Using get_group_perms to get permissions that are inferred through - # group membership - not inherited from superuser status - return '{}_{}'.format(permission, 'group') in get_group_perms(user, self) - - def remove_group(self, auth=None): - """Removes the OSFGroup and associated manager and member django groups - :param auth: Auth object - """ - self._require_manager_permission(auth) - group_id = self._id - members = list(self.members.values_list('id', flat=True)) - nodes = self.nodes - - self.member_group.delete() - self.manager_group.delete() - self.delete() - self.update_search(deleted_id=group_id) - - for user in OSFUser.objects.filter(id__in=members): - for node in nodes: - node.disconnect_addons(user, auth) - params = { - 'group': group_id, - 'node': node._id, - } - self.add_corresponding_node_log(node, NodeLog.GROUP_REMOVED, params, auth) - project_signals.contributor_removed.send(node, user=user) - node.update_search() - - def save(self, *args, **kwargs): - first_save = not bool(self.pk) - ret = super().save(*args, **kwargs) - if first_save: - self.update_group_permissions() - self.make_manager(self.creator) - - return ret - - def add_role_updated_log(self, user, role, auth=None): - """Creates a log when role changes - :param auth: Auth object - """ - self.add_log( - OSFGroupLog.ROLE_UPDATED, - params={ - 'group': self._id, - 'new_role': role, - 'user': user._id, - }, - auth=auth) - - def add_corresponding_node_log(self, node, action, params, auth): - """ Used for logging OSFGroup-related action to nodes - for example, - adding a group to a node. 
- - :param node: Node object - :param action: string, Node log action - :param params: dict, log params - """ - node.add_log( - action=action, - params=params, - auth=auth, - save=True - ) - - def add_log(self, action, params, auth, log_date=None, save=True): - """Create OSFGroupLog - :param action: string, OSFGroup log action - :param params: dict, log params - """ - user = None - if auth: - user = auth.user - - log = OSFGroupLog( - action=action, user=user, - params=params, group=self - ) - - log.save() - - self._complete_add_log(log, action, user, save) - return log - - def update_search(self, deleted_id=None): - from website import search - - try: - search.search.update_group(self, bulk=False, async_update=True, deleted_id=deleted_id) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception(e) - - @classmethod - def bulk_update_search(cls, groups, index=None): - from website import search - try: - serialize = functools.partial(search.search.update_group, index=index, bulk=True, async_update=False) - search.search.bulk_update_nodes(serialize, groups, index=index) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception(e) - - -@receiver(post_save, sender=OSFGroup) -def add_project_created_log(sender, instance, created, **kwargs): - if created: - log_action = OSFGroupLog.GROUP_CREATED - log_params = { - 'group': instance._id, - } - - instance.add_log( - log_action, - params=log_params, - auth=Auth(user=instance.creator), - log_date=instance.created, - save=True, - ) - - -class OSFGroupUserObjectPermission(UserObjectPermissionBase): - """ - Direct Foreign Key Table for guardian - User models - we typically add object - perms directly to Django groups instead of users, so this will be used infrequently - """ - content_object = models.ForeignKey(OSFGroup, on_delete=models.CASCADE) - - -class OSFGroupGroupObjectPermission(GroupObjectPermissionBase): - """ - Direct Foreign Key Table for guardian - Group models. Makes permission checks faster. - - This table gives a Django group a particular permission to an OSF Group. - (Every time an OSFGroup is created, a Django member group, and Django manager group are created. - The member group is given member perms, manager group has manager perms.) 
- """ - content_object = models.ForeignKey(OSFGroup, on_delete=models.CASCADE) diff --git a/osf/models/osf_grouplog.py b/osf/models/osf_grouplog.py deleted file mode 100644 index 38ef2ee3551..00000000000 --- a/osf/models/osf_grouplog.py +++ /dev/null @@ -1,52 +0,0 @@ -from django.db import models -from .base import BaseModel, ObjectIDMixin -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField -from website.util import api_v2_url - - -class OSFGroupLog(ObjectIDMixin, BaseModel): - DATE_FORMAT = '%m/%d/%Y %H:%M UTC' - - GROUP_CREATED = 'group_created' - - MEMBER_ADDED = 'member_added' - MANAGER_ADDED = 'manager_added' - MEMBER_REMOVED = 'member_removed' - ROLE_UPDATED = 'role_updated' - EDITED_NAME = 'edit_name' - NODE_CONNECTED = 'node_connected' - NODE_PERMS_UPDATED = 'node_permissions_updated' - NODE_DISCONNECTED = 'node_disconnected' - - actions = ([GROUP_CREATED, MEMBER_ADDED, MANAGER_ADDED, MEMBER_REMOVED, ROLE_UPDATED, - EDITED_NAME, NODE_CONNECTED, NODE_PERMS_UPDATED, NODE_DISCONNECTED]) - - action_choices = [(action, action.upper()) for action in actions] - - action = models.CharField(max_length=255, db_index=True) - params = DateTimeAwareJSONField(default=dict) - should_hide = models.BooleanField(default=False) - user = models.ForeignKey('OSFUser', related_name='group_logs', db_index=True, - null=True, blank=True, on_delete=models.CASCADE) - group = models.ForeignKey('OSFGroup', related_name='logs', - db_index=True, null=True, blank=True, on_delete=models.CASCADE) - - def __unicode__(self): - return ('({self.action!r}, user={self.user!r}, group={self.group!r}, params={self.params!r}) ' - 'with id {self.id!r}').format(self=self) - - class Meta: - ordering = ['-created'] - get_latest_by = 'created' - - @property - def absolute_api_v2_url(self): - path = f'/logs/{self._id}/' - return api_v2_url(path) - - def get_absolute_url(self): - return self.absolute_api_v2_url - - @property - def absolute_url(self): - return self.absolute_api_v2_url diff --git a/osf/models/private_link.py b/osf/models/private_link.py index af861ccf9ff..a29c854e659 100644 --- a/osf/models/private_link.py +++ b/osf/models/private_link.py @@ -1,6 +1,4 @@ from django.db import models -from django.dispatch import receiver -from django.core.exceptions import ValidationError from framework.utils import iso8601format @@ -43,13 +41,3 @@ def to_json(self): for x in self.nodes.filter(is_deleted=False)], 'anonymous': self.anonymous } - - -##### Signal listeners ##### -@receiver(models.signals.m2m_changed, sender=PrivateLink.nodes.through) -def check_if_private_link_is_to_quickfiles(sender, instance, action, reverse, model, pk_set, **kwargs): - from .node import AbstractNode - - if action == 'pre_add' and pk_set: - if model == AbstractNode and model.objects.get(id=list(pk_set)[0]).is_quickfiles: - raise ValidationError('A private link cannot be added to a QuickFilesNode') diff --git a/osf/models/provider.py b/osf/models/provider.py index 2ee920a77e5..b8dacc174bf 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -19,7 +19,7 @@ from .brand import Brand from .citation import CitationStyle from .licenses import NodeLicense -from .notifications import NotificationSubscription +from .notifications import NotificationSubscriptionLegacy from .storage import ProviderAssetFile from .subject import Subject from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField @@ -464,7 +464,7 @@ def create_provider_auth_groups(sender, instance, created, **kwargs): def 
create_provider_notification_subscriptions(sender, instance, created, **kwargs): if created: for subscription in instance.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( + NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{instance._id}_{subscription}', event_name=subscription, provider=instance diff --git a/osf/models/quickfiles.py b/osf/models/quickfiles.py deleted file mode 100644 index 2f0fa576e93..00000000000 --- a/osf/models/quickfiles.py +++ /dev/null @@ -1,93 +0,0 @@ -import logging - -from .node import ( - AbstractNode, - AbstractNodeManager, - Node -) -from .nodelog import NodeLog - -from osf.exceptions import NodeStateError - - -logger = logging.getLogger(__name__) - - -class QuickFilesNodeManager(AbstractNodeManager): - - def create_for_user(self, user): - possessive_title = get_quickfiles_project_title(user) - - quickfiles, created = QuickFilesNode.objects.get_or_create( - title=possessive_title, - creator=user - ) - - if not created: - raise NodeStateError('Users may only have one quickfiles project') - - quickfiles.add_addon('osfstorage', auth=None, log=False) - - return quickfiles - - def get_for_user(self, user): - try: - return QuickFilesNode.objects.get(creator=user) - except AbstractNode.DoesNotExist: - return Node.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES, - creator=user - ).order_by('created').first() # Returns None if there are none - - -class QuickFilesNode(AbstractNode): - __guid_min_length__ = 10 - - objects = QuickFilesNodeManager() - - def __init__(self, *args, **kwargs): - kwargs['is_public'] = True - super().__init__(*args, **kwargs) - - def remove_node(self, auth, date=None): - # QuickFilesNodes are only delete-able for disabled users - # This is only done when doing a GDPR-delete - if auth.user.is_disabled: - super().remove_node(auth=auth, date=date) - else: - raise NodeStateError('A QuickFilesNode may not be deleted.') - - def set_privacy(self, permissions, *args, **kwargs): - raise NodeStateError('You may not set privacy for a QuickFilesNode.') - - def add_contributor(self, contributor, *args, **kwargs): - if contributor == self.creator: - return super().add_contributor(contributor, *args, **kwargs) - raise NodeStateError('A QuickFilesNode may not have additional contributors.') - - def clone(self): - raise NodeStateError('A QuickFilesNode may not be forked, used as a template, or registered.') - - def add_addon(self, name, auth, log=True): - if name != 'osfstorage': - raise NodeStateError('A QuickFilesNode can only have the osfstorage addon.') - return super().add_addon(name, auth, log) - - @property - def is_registration(self): - """For v1 compat.""" - return False - - @property - def is_collection(self): - """For v1 compat.""" - return False - - @property - def is_quickfiles(self): - return True - - -def get_quickfiles_project_title(user): - possessive_title_name = user.fullname + "'s" if user.fullname[-1] != 's' else user.fullname + "'" - return f'{possessive_title_name} Quick Files' diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index 6d8b904b4b9..308d5c82a9f 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -20,6 +20,7 @@ from osf.utils import tokens from osf.utils.machines import ApprovalsMachine from osf.utils.workflows import ApprovalStates, SanctionTypes +from osf.models import NotificationType VIEW_PROJECT_URL_TEMPLATE = osf_settings.DOMAIN + '{node_id}/' @@ -375,6 +376,12 @@ def _format_or_empty(template, context): return template.format(**context) return 
'' + def _get_authoriser_notification_type(self): + return None + + def _get_non_authoriser_notification_type(self): + return None + def _view_url(self, user_id, node): return self._format_or_empty(self.VIEW_URL_TEMPLATE, self._view_url_context(user_id, node)) @@ -412,6 +419,13 @@ def _notify_authorizer(self, authorizer, node): else: raise NotImplementedError() + try: + notification_type = self._get_authoriser_notification_type() + except NotificationType.DoesNotExist: + raise NotImplementedError() + if notification_type: + notification_type.emit(authorizer, event_context=context) + def _notify_non_authorizer(self, user, node): context = self._email_template_context(user, node) if self.NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE: @@ -420,6 +434,9 @@ def _notify_non_authorizer(self, user, node): else: raise NotImplementedError + if notification_type := self._get_non_authoriser_notification_type(): + notification_type.emit(user, event_context=context) + def ask(self, group): """ :param list group: List of (user, node) tuples containing contributors to notify about the @@ -470,6 +487,9 @@ class Embargo(SanctionCallbackMixin, EmailApprovableSanction): AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_ADMIN NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_embargo_admin' + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_embargo_non_admin' + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -502,6 +522,22 @@ def embargo_end_date(self): def pending_registration(self): return not self.for_existing_registration and self.is_pending_approval + def _get_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.' + ) + return notification_type.first() + + def _get_non_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.'
+ ) + return notification_type.first() + def _get_registration(self): return self.registrations.first() @@ -650,6 +686,9 @@ class Retraction(EmailApprovableSanction): AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_ADMIN NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_retraction_admin' + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_retraction_non_admin' + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -658,6 +697,22 @@ class Retraction(EmailApprovableSanction): justification = models.CharField(max_length=2048, null=True, blank=True) date_retracted = NonNaiveDateTimeField(null=True, blank=True) + def _get_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.' + ) + return notification_type.first() + + def _get_non_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.' + ) + return notification_type.first() + def _get_registration(self): Registration = apps.get_model('osf.Registration') parent_registration = Registration.objects.get(retraction=self) @@ -770,6 +825,9 @@ class RegistrationApproval(SanctionCallbackMixin, EmailApprovableSanction): AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_ADMIN NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_registration_admin' + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_registration_non_admin' + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -788,6 +846,22 @@ def find_approval_backlog(): guid=models.F('_id') ).order_by('-initiation_date') + def _get_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.' + ) + return notification_type.first() + + def _get_non_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.'
+ ) + return notification_type.first() + def _get_registration(self): return self.registrations.first() @@ -935,6 +1009,9 @@ class EmbargoTerminationApproval(EmailApprovableSanction): AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_TERMINATION_ADMIN NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_TERMINATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_embargo_termination_admin' + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'pending_embargo_termination_non_admin' + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -949,6 +1026,22 @@ def is_moderated(self): def _get_registration(self): return self.embargoed_registration + def _get_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.' + ) + return notification_type.first() + + def _get_non_authoriser_notification_type(self): + notification_type = NotificationType.objects.filter(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE} does not exist.' + ) + return notification_type.first() + def _view_url_context(self, user_id, node): registration = node or self._get_registration() return { diff --git a/osf/models/user.py b/osf/models/user.py index 008f2affe60..ede9c96d5e5 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -14,7 +14,6 @@ import pytz from dirtyfields import DirtyFieldsMixin -from django.apps import apps from django.conf import settings from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager from django.contrib.auth.hashers import check_password @@ -24,7 +23,6 @@ from django.db.models import Count, Exists, OuterRef from django.db.models.signals import post_save from django.utils import timezone -from guardian.shortcuts import get_objects_for_user from framework import sentry from framework.auth import Auth, signals, utils @@ -57,7 +55,7 @@ from osf.utils.fields import NonNaiveDateTimeField, LowercaseEmailField, ensure_str from osf.utils.names import impute_names from osf.utils.requests import check_select_for_update -from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, MANAGER, MEMBER, MANAGE, ADMIN +from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, MANAGER, MEMBER, ADMIN from website import settings as website_settings from website import filters, mails from website.project import new_bookmark_collection @@ -69,8 +67,6 @@ logger = logging.getLogger(__name__) -MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS = 1000 - def get_default_mailing_lists(): return {'Open Science Framework Help': True} @@ -644,14 +640,6 @@ def is_authenticated(self): # Needed for django compat def is_anonymous(self): return False - @property - def osf_groups(self): - """ - OSFGroups that the user belongs to - """ - OSFGroup = apps.get_model('osf.OSFGroup') - return get_objects_for_user(self, 'member_group', OSFGroup, with_superuser=False) - def is_institutional_admin_at(self, institution): """ Checks if user is admin of a specific institution.
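The sanction classes above now resolve their approval emails through NotificationType instead of calling website.mails.send_mail directly. Below is a minimal sketch of that lookup-and-emit flow, assuming the 'pending_embargo_admin' type has already been seeded in the database (the name comes from Embargo.AUTHORIZER_NOTIFY_EMAIL_TYPE in this changeset); the helper name is hypothetical.

```python
# Sketch only, not part of the changeset: roughly mirrors what
# EmailApprovableSanction._notify_authorizer does for an Embargo here.
from osf.models import NotificationType

def notify_embargo_authorizer(authorizer, context):
    # Resolve the type by name; the sanction code raises
    # NotificationType.DoesNotExist when no matching row is found.
    notification_type = NotificationType.objects.filter(
        name='pending_embargo_admin',
    ).first()
    if notification_type:
        # emit() get_or_creates a NotificationSubscription for the user and,
        # when that subscription's message_frequency is 'instantly', creates
        # a Notification and sends it (SendGrid, or SMTP/logging in DEV_MODE).
        notification_type.emit(authorizer, event_context=context)
```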
@@ -818,9 +806,6 @@ def merge_user(self, user): # - projects where the user was a contributor (group member only are not included). for node in user.contributed: - # Skip quickfiles - if node.is_quickfiles: - continue user_perms = Contributor(node=node, user=user).permission # if both accounts are contributor of the same project if node.is_contributor(self) and node.is_contributor(user): @@ -845,10 +830,9 @@ def merge_user(self, user): user.collection_set.exclude(is_bookmark_collection=True).update(creator=self) from .files import BaseFileNode - from .quickfiles import QuickFilesNode # - projects where the user was the creator - user.nodes_created.exclude(type=QuickFilesNode._typedmodels_type).update(creator=self) + user.nodes_created.update(creator=self) # - file that the user has checked_out, import done here to prevent import error for file_node in BaseFileNode.files_checked_out(user=user): @@ -861,17 +845,7 @@ def merge_user(self, user): # Transfer user's draft registrations self._merge_user_draft_registrations(user) - # transfer group membership - for group in user.osf_groups: - if not group.is_manager(self): - if group.has_permission(user, MANAGE): - group.make_manager(self) - else: - group.make_member(self) - group.remove_member(user) - # finalize the merge - remove_sessions_for_user(user) # - username is set to the GUID so the merging user can set it primary @@ -1059,13 +1033,6 @@ def save(self, *args, **kwargs): if self.SEARCH_UPDATE_FIELDS.intersection(dirty_fields) and self.is_confirmed: self.update_search() self.update_search_nodes_contributors() - if 'fullname' in dirty_fields: - from .quickfiles import get_quickfiles_project_title, QuickFilesNode - - quickfiles = QuickFilesNode.objects.filter(creator=self).first() - if quickfiles: - quickfiles.title = get_quickfiles_project_title(self) - quickfiles.save() if 'username' in dirty_fields: for list_name, subscription in self.mailchimp_mailing_lists.items(): if subscription: @@ -1469,7 +1436,7 @@ def confirm_spam(self, domains=None, save=True, train_spam_services=False): super().confirm_spam(domains=domains, save=save, train_spam_services=train_spam_services) # Don't train on resources merely associated with spam user - for node in self.nodes.filter(is_public=True, is_deleted=False).exclude(type='osf.quickfilesnode'): + for node in self.nodes.filter(is_public=True, is_deleted=False): node.confirm_spam(train_spam_services=train_spam_services) for preprint in self.preprints.filter(is_public=True, deleted__isnull=True): preprint.confirm_spam(train_spam_services=train_spam_services) @@ -1479,7 +1446,7 @@ def confirm_ham(self, save=False, train_spam_services=False): super().confirm_ham(save=save, train_spam_services=train_spam_services) # Don't train on resources merely associated with spam user - for node in self.nodes.filter().exclude(type='osf.quickfilesnode'): + for node in self.nodes.filter(): node.confirm_ham(save=save, train_spam_services=train_spam_services) for preprint in self.preprints.filter(): preprint.confirm_ham(save=save, train_spam_services=train_spam_services) @@ -1524,9 +1491,6 @@ def update_search_nodes(self): for node in self.contributor_to: node.update_search() - for group in self.osf_groups: - group.update_search() - def update_date_last_login(self, login_time=None): self.date_last_login = login_time or timezone.now() @@ -1673,7 +1637,6 @@ def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None, s """ from .provider import AbstractProvider - from .osf_group import OSFGroup if not 
skip_referrer_permissions: if isinstance(claim_origin, AbstractProvider): @@ -1681,12 +1644,6 @@ def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None, s raise PermissionsError( f'Referrer does not have permission to add a moderator to provider {claim_origin._id}' ) - - elif isinstance(claim_origin, OSFGroup): - if not claim_origin.has_permission(referrer, MANAGE): - raise PermissionsError( - f'Referrer does not have permission to add a member to {claim_origin._id}' - ) else: if not claim_origin.has_permission(referrer, ADMIN): raise PermissionsError( @@ -2025,9 +1982,6 @@ def gdpr_delete(self): hard_delete=True ) - # A Potentially out of date check that user isn't a member of a OSFGroup - self._validate_osf_groups() - # Finally delete the user's info. self._clear_identifying_information() @@ -2084,20 +2038,6 @@ def _validate_and_remove_resource_for_gdpr_delete(self, resources, hard_delete): logger.info(f'Soft-deleting {entity.__class__.__name__} (pk: {entity.pk})...') entity.remove_node(auth=Auth(self)) - def _validate_osf_groups(self): - """ - This method ensures a user isn't in an OSFGroup before deleting them.. - """ - for group in self.osf_groups: - if not group.managers.exclude(id=self.id).filter(is_registered=True).exists() and group.members.exclude( - id=self.id).exists(): - raise UserStateError( - f'You cannot delete this user because they are the only registered manager of OSFGroup {group._id} that contains other members.') - elif len(group.managers) == 1 and group.managers[0] == self: - group.remove_group() - else: - group.remove_member(self) - def _clear_identifying_information(self): ''' This method ensures a user's info is deleted during a GDPR delete @@ -2152,10 +2092,9 @@ def has_resources(self): from osf.models import Preprint nodes = self.nodes.filter(deleted__isnull=True).exists() - groups = self.osf_groups.exists() preprints = Preprint.objects.filter(_contributors=self, ever_public=True, deleted__isnull=True).exists() - return groups or nodes or preprints + return nodes or preprints class Meta: # custom permissions for use in the OSF Admin App diff --git a/osf_tests/factories.py b/osf_tests/factories.py index 7ad8885e1ad..d1c7e640250 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -374,15 +374,6 @@ def _create(cls, *args, **kwargs): return obj -class OSFGroupFactory(DjangoModelFactory): - name = factory.Faker('company') - created = factory.LazyFunction(timezone.now) - creator = factory.SubFactory(AuthUserFactory) - - class Meta: - model = models.OSFGroup - - class RegistrationFactory(BaseNodeFactory): creator = None @@ -1049,9 +1040,20 @@ def handle_callback(self, response): } +class NotificationSubscriptionLegacyFactory(DjangoModelFactory): + class Meta: + model = models.NotificationSubscriptionLegacy + + class NotificationSubscriptionFactory(DjangoModelFactory): class Meta: model = models.NotificationSubscription + notification_type = factory.LazyAttribute(lambda o: NotificationTypeFactory()) + + +class NotificationTypeFactory(DjangoModelFactory): + class Meta: + model = models.NotificationType def make_node_lineage(): diff --git a/osf_tests/management_commands/test_check_crossref_dois.py b/osf_tests/management_commands/test_check_crossref_dois.py index c4e37d9c389..993c7e6731e 100644 --- a/osf_tests/management_commands/test_check_crossref_dois.py +++ b/osf_tests/management_commands/test_check_crossref_dois.py @@ -8,12 +8,13 @@ from osf_tests.factories import PreprintFactory -from website import settings, mails +from website 
import settings from osf.management.commands.check_crossref_dois import check_crossref_dois, report_stuck_dois @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCheckCrossrefDOIs: @pytest.fixture() @@ -60,15 +61,7 @@ def test_check_crossref_dois(self, crossref_response, stuck_preprint, preprint): assert stuck_preprint.identifiers.count() == 1 assert stuck_preprint.identifiers.first().value == doi - @mock.patch('website.mails.send_mail') - def test_report_stuck_dois(self, mock_email, stuck_preprint): + def test_report_stuck_dois(self, mock_send_grid, stuck_preprint): report_stuck_dois(dry_run=False) - guid = stuck_preprint.guids.first()._id - mock_email.assert_called_with( - guids=guid, - time_since_published=2, - mail=mails.CROSSREF_DOIS_PENDING, - pending_doi_count=1, - to_addr=settings.OSF_SUPPORT_EMAIL - ) + mock_send_grid.assert_called() diff --git a/osf_tests/management_commands/test_email_all_users.py b/osf_tests/management_commands/test_email_all_users.py index c10c84b49d1..14df656ee52 100644 --- a/osf_tests/management_commands/test_email_all_users.py +++ b/osf_tests/management_commands/test_email_all_users.py @@ -1,13 +1,12 @@ -from unittest import mock import pytest from django.utils import timezone from osf_tests.factories import UserFactory -from website import mails from osf.management.commands.email_all_users import email_all_users +@pytest.mark.usefixtures('mock_send_grid') class TestEmailAllUsers: @pytest.fixture() @@ -42,32 +41,25 @@ def unregistered_user(self): return UserFactory(is_registered=False) @pytest.mark.django_db - @mock.patch('website.mails.send_mail') - def test_email_all_users_dry(self, mock_email, superuser): + def test_email_all_users_dry(self, mock_send_grid, superuser): email_all_users('TOU_NOTIF', dry_run=True) - mock_email.assert_called_with( - to_addr=superuser.email, - mail=mails.TOU_NOTIF, - given_name=superuser.given_name - ) + mock_send_grid.assert_called() @pytest.mark.django_db - @mock.patch('website.mails.send_mail') def test_dont_email_inactive_users( - self, mock_email, deleted_user, inactive_user, unconfirmed_user, unregistered_user): + self, mock_send_grid, deleted_user, inactive_user, unconfirmed_user, unregistered_user): email_all_users('TOU_NOTIF') - mock_email.assert_not_called() + mock_send_grid.assert_not_called() @pytest.mark.django_db - @mock.patch('website.mails.send_mail') - def test_email_all_users_offset(self, mock_email, user, user2): + def test_email_all_users_offset(self, mock_send_grid, user, user2): email_all_users('TOU_NOTIF', offset=1, start_id=0) email_all_users('TOU_NOTIF', offset=1, start_id=1) email_all_users('TOU_NOTIF', offset=1, start_id=2) - assert mock_email.call_count == 2 + assert mock_send_grid.call_count == 2 diff --git a/osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py b/osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py deleted file mode 100644 index 04b4619e108..00000000000 --- a/osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py +++ /dev/null @@ -1,90 +0,0 @@ -import pytest -from osf.management.commands.fix_quickfiles_waterbutler_logs import fix_quickfiles_waterbutler_logs -from osf_tests.factories import ProjectFactory -from osf.models import NodeLog - - -@pytest.mark.django_db -class TestFixQuickFilesLogs: - - @pytest.fixture() - def node(self): - return ProjectFactory() - - @pytest.fixture() - def node_log_files_added(self, node): - return NodeLog( - action='osf_storage_file_added', - node=node, - params={ - 
'contributors': [], - 'params_node': { - 'id': 'jpmxy', - 'title': "John Tordoff's Quick Files" - }, - 'params_project': None, - 'path': '/test.json', - 'pointer': None, - 'preprint_provider': None, - 'urls': { - 'view': f'/{node._id}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={node._id}', - 'download': f'/{node._id}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={node._id}?action=download' - } - } - ).save() - - @pytest.fixture() - def node_log_files_renamed(self, node): - return NodeLog( - action='addon_file_renamed', - node=node, - params={ - 'contributors': [], - 'destination': { - 'materialized': 'test-JATS1.xml', - 'url': '/project/jpmxy/files/osfstorage/622aad914ef4bb0ac0333f9f/', - 'addon': 'OSF Storage', - 'node_url': '/jpmxy/', - 'resource': 'jpmxy', - 'node_title': "John Tordoff's Quick Files" - }, - 'params_node': { - 'id': 'jpmxy', - 'title': "John Tordoff's Quick Files" - }, - 'params_project': None, - 'pointer': None, - 'preprint_provider': None, - 'source': { - 'materialized': 'test-JATS.xml', - 'url': '/project/jpmxy/files/osfstorage/622aad914ef4bb0ac0333f9f/', - 'addon': 'OSF Storage', - 'node_url': '/jpmxy/', - 'resource': 'jpmxy', - 'node_title': "John Tordoff's Quick Files" - } - } - ).save() - - @pytest.mark.enable_enqueue_task - def test_fix_quickfiles_waterbutler_logs_files_added(self, node, node_log_files_added): - NodeLog(node=node, action=NodeLog.MIGRATED_QUICK_FILES).save() - fix_quickfiles_waterbutler_logs() - log = node.logs.all().get(action='osf_storage_file_added') - guid = node.guids.last()._id - - assert log.params['urls'] == { - 'view': f'/{guid}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={guid}', - 'download': f'/{guid}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={guid}&action=download' - } - - @pytest.mark.enable_enqueue_task - def test_fix_quickfiles_waterbutler_logs_files_renamed(self, node, node_log_files_renamed): - NodeLog(node=node, action=NodeLog.MIGRATED_QUICK_FILES).save() - fix_quickfiles_waterbutler_logs() - log = node.logs.all().get(action='addon_file_renamed') - guid = node.guids.last()._id - - assert log.params['source']['url'] == f'/project/{guid}/files/osfstorage/622aad914ef4bb0ac0333f9f/?pid={guid}' - assert log.params['destination']['url'] == f'/project/{guid}/files/osfstorage/622aad914ef4bb0ac0333f9f/?pid={guid}' - assert log.params['params_node']['_id'] == guid diff --git a/osf_tests/management_commands/test_migrate_notifications.py b/osf_tests/management_commands/test_migrate_notifications.py new file mode 100644 index 00000000000..f303ec3f996 --- /dev/null +++ b/osf_tests/management_commands/test_migrate_notifications.py @@ -0,0 +1,132 @@ +import pytest +from django.contrib.contenttypes.models import ContentType + +from osf.models import Node, RegistrationProvider +from osf_tests.factories import ( + AuthUserFactory, + PreprintProviderFactory, + ProjectFactory, +) +from osf.models import ( + NotificationType, + NotificationSubscription, + NotificationSubscriptionLegacy +) +from osf.management.commands.migrate_notifications import ( + migrate_legacy_notification_subscriptions, + update_notification_types +) + +@pytest.mark.django_db +class TestNotificationSubscriptionMigration: + + @pytest.fixture(autouse=True) + def notification_types(self): + return update_notification_types() + + @pytest.fixture() + def user(self): + return AuthUserFactory() + + @pytest.fixture() + def users(self): + return { + 'none': AuthUserFactory(), + 'digest': AuthUserFactory(), + 'transactional': AuthUserFactory(), + } + + @pytest.fixture() + 
def provider(self): + return PreprintProviderFactory() + + @pytest.fixture() + def provider2(self): + return PreprintProviderFactory() + + @pytest.fixture() + def node(self): + return ProjectFactory() + + def create_legacy_sub(self, event_name, users, user=None, provider=None, node=None): + legacy = NotificationSubscriptionLegacy.objects.create( + _id=f'{(provider or node)._id}_{event_name}', + user=user, + event_name=event_name, + provider=provider, + node=node + ) + legacy.none.add(users['none']) + legacy.email_digest.add(users['digest']) + legacy.email_transactional.add(users['transactional']) + return legacy + + def test_migrate_provider_subscription(self, user, provider, provider2): + NotificationSubscriptionLegacy.objects.get( + event_name='new_pending_submissions', + provider=provider + ) + NotificationSubscriptionLegacy.objects.get( + event_name='new_pending_submissions', + provider=provider2 + ) + NotificationSubscriptionLegacy.objects.get( + event_name='new_pending_submissions', + provider=RegistrationProvider.get_default() + ) + migrate_legacy_notification_subscriptions() + + subs = NotificationSubscription.objects.filter(notification_type__name='new_pending_submissions') + assert subs.count() == 3 + assert subs.get( + notification_type__name='new_pending_submissions', + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider.__class__) + ) + assert subs.get( + notification_type__name='new_pending_submissions', + object_id=provider2.id, + content_type=ContentType.objects.get_for_model(provider2.__class__) + ) + + def test_migrate_node_subscription(self, users, user, node): + self.create_legacy_sub('wiki_updated', users, user=user, node=node) + + migrate_legacy_notification_subscriptions() + + nt = NotificationType.objects.get(name='wiki_updated') + assert nt.object_content_type == ContentType.objects.get_for_model(Node) + + subs = NotificationSubscription.objects.filter(notification_type=nt) + assert subs.count() == 1 + + for sub in subs: + assert sub.subscribed_object == node + + def test_multiple_subscriptions_different_types(self, users, user, provider, node): + assert not NotificationSubscription.objects.filter(user=user) + self.create_legacy_sub('wiki_updated', users, user=user, node=node) + migrate_legacy_notification_subscriptions() + assert NotificationSubscription.objects.get(user=user).notification_type.name == 'wiki_updated' + assert NotificationSubscription.objects.get(notification_type__name='wiki_updated', user=user) + + def test_idempotent_migration(self, users, user, node, provider): + self.create_legacy_sub('file_updated', users, user=user, node=node) + migrate_legacy_notification_subscriptions() + migrate_legacy_notification_subscriptions() + assert NotificationSubscription.objects.get( + user=user, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node.__class__), + notification_type__name='file_updated' + ) + + def test_errors_invalid_subscription(self, users): + legacy = NotificationSubscriptionLegacy.objects.create( + _id='broken', + event_name='invalid_event' + ) + legacy.none.add(users['none']) + + with pytest.raises(NotImplementedError): + migrate_legacy_notification_subscriptions() diff --git a/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py b/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py deleted file mode 100644 index daaeee7703c..00000000000 --- a/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest 
- -from api_tests.utils import create_test_file - -from osf.management.commands.transfer_quickfiles_to_projects import ( - remove_quickfiles, - reverse_remove_quickfiles, - QUICKFILES_DESC -) -from osf.models import NodeLog -from osf.models.quickfiles import QuickFilesNode, get_quickfiles_project_title - -from osf_tests.factories import AuthUserFactory - - -@pytest.mark.django_db -class TestTransferQuickfilesToProjects: - - @pytest.fixture() - def user_with_quickfiles(self): - user = AuthUserFactory() - qfnode = QuickFilesNode.objects.create_for_user(user) - create_test_file(target=qfnode, user=user) - return user - - def test_tranfer_quickfiles_to_projects(self, user_with_quickfiles): - remove_quickfiles() - - assert not QuickFilesNode.objects.all() - node = user_with_quickfiles.nodes.get( - title=get_quickfiles_project_title(user_with_quickfiles), - logs__action=NodeLog.MIGRATED_QUICK_FILES, - description=QUICKFILES_DESC - ) - assert node.files.all() - - def test_reverse_tranfer_quickfiles_to_projects(self, user_with_quickfiles): - remove_quickfiles() - reverse_remove_quickfiles() - - quickfiles_node = QuickFilesNode.objects.get_for_user(user_with_quickfiles) - assert QuickFilesNode.objects.all().get() == quickfiles_node - assert quickfiles_node.files.exists() diff --git a/osf_tests/test_analytics.py b/osf_tests/test_analytics.py index 14b6c05df51..2bcd1fd1d10 100644 --- a/osf_tests/test_analytics.py +++ b/osf_tests/test_analytics.py @@ -11,7 +11,7 @@ from addons.osfstorage.models import OsfStorageFile from framework import analytics -from osf.models import PageCounter, OSFGroup +from osf.models import PageCounter from tests.base import OsfTestCase from osf_tests.factories import UserFactory, ProjectFactory @@ -127,26 +127,6 @@ def test_download_update_counter_contributor(self, user, project, file_node): assert page_counter.total == 0 assert page_counter.unique == 0 - platform_group = OSFGroup.objects.create(creator=user, name='Platform') - group_member = UserFactory() - project.add_osf_group(platform_group) - - session['auth_user_id'] = group_member._id - session.save() - PageCounter.update_counter(resource, file_node, version=None, action='download', node_info={ - 'contributors': project.contributors_and_group_members}, session_key=session.session_key - ) - page_counter.refresh_from_db() - assert page_counter.total == 1 - assert page_counter.unique == 1 - - platform_group.make_member(group_member) - PageCounter.update_counter(resource, file_node, version=None, action='download', node_info={ - 'contributors': project.contributors_and_group_members}, session_key=session.session_key - ) - assert page_counter.total == 1 - assert page_counter.unique == 1 - def test_get_all_downloads_on_date(self, page_counter, page_counter2): """ This method tests that multiple pagecounter objects have their download totals summed properly. 
diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 8a1643cc83f..65ebc719789 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -7,7 +7,6 @@ from unittest import mock from django.utils import timezone from django.db import IntegrityError -from unittest.mock import call import pytest from framework.auth import Auth @@ -34,6 +33,7 @@ from tests.base import OsfTestCase, fake from tests import utils as test_utils from tests.utils import unique as _unique +from conftest import start_mock_send_grid pytestmark = pytest.mark.django_db @@ -721,10 +721,15 @@ def test_archive_success_same_file_in_component(self): assert child_reg._id in question['extra'][0]['viewUrl'] +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverUtils(ArchiverTestCase): - @mock.patch('website.mails.send_mail') - def test_handle_archive_fail(self, mock_send_mail): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + + def test_handle_archive_fail(self): archiver_utils.handle_archive_fail( ARCHIVER_NETWORK_ERROR, self.src, @@ -732,13 +737,11 @@ def test_handle_archive_fail(self, mock_send_mail): self.user, {} ) - assert mock_send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 self.dst.reload() assert self.dst.is_deleted - @mock.patch('website.mails.send_mail') - def test_handle_archive_fail_copy(self, mock_send_mail): - url = settings.INTERNAL_DOMAIN + self.src._id + def test_handle_archive_fail_copy(self): archiver_utils.handle_archive_fail( ARCHIVER_NETWORK_ERROR, self.src, @@ -746,31 +749,9 @@ def test_handle_archive_fail_copy(self, mock_send_mail): self.user, {} ) - args_user = dict( - to_addr=self.user.username, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_COPY_ERROR_USER, - results={}, - can_change_preferences=False, - ) - args_desk = dict( - to_addr=settings.OSF_SUPPORT_EMAIL, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_COPY_ERROR_DESK, - results={}, - can_change_preferences=False, - url=url, - ) - mock_send_mail.assert_has_calls([ - call(**args_user), - call(**args_desk), - ], any_order=True) - - @mock.patch('website.mails.send_mail') - def test_handle_archive_fail_size(self, mock_send_mail): - url = settings.INTERNAL_DOMAIN + self.src._id + assert self.mock_send_grid.call_count == 2 + + def test_handle_archive_fail_size(self): archiver_utils.handle_archive_fail( ARCHIVER_SIZE_EXCEEDED, self.src, @@ -778,26 +759,7 @@ def test_handle_archive_fail_size(self, mock_send_mail): self.user, {} ) - args_user = dict( - to_addr=self.user.username, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_SIZE_EXCEEDED_USER, - can_change_preferences=False, - ) - args_desk = dict( - to_addr=settings.OSF_SUPPORT_EMAIL, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_SIZE_EXCEEDED_DESK, - stat_result={}, - can_change_preferences=False, - url=url, - ) - mock_send_mail.assert_has_calls([ - call(**args_user), - call(**args_desk), - ], any_order=True) + assert self.mock_send_grid.call_count == 2 def test_aggregate_file_tree_metadata(self): a_stat_result = archiver_utils.aggregate_file_tree_metadata('dropbox', FILE_TREE, self.user) @@ -884,9 +846,14 @@ def test_get_file_map_memoization(self): archiver_utils.get_file_map(node) assert mock_get_file_tree.call_count == call_count - +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverListeners(ArchiverTestCase): + 
def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('website.archiver.tasks.archive') @mock.patch('website.archiver.utils.before_archive') def test_after_register(self, mock_before_archive, mock_archive): @@ -937,24 +904,21 @@ def test_archive_callback_pending(self, mock_delay): ARCHIVER_SUCCESS ) self.dst.archive_job.save() - with mock.patch('website.mails.send_mail') as mock_send: - with mock.patch('website.archiver.utils.handle_archive_fail') as mock_fail: - listeners.archive_callback(self.dst) - assert not mock_send.called + with mock.patch('website.archiver.utils.handle_archive_fail') as mock_fail: + listeners.archive_callback(self.dst) + assert not self.mock_send_grid.called assert not mock_fail.called assert mock_delay.called - @mock.patch('website.mails.send_mail') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archive_callback_done_success(self, mock_send, mock_archive_success): + def test_archive_callback_done_success(self, mock_archive_success): self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) self.dst.archive_job.save() listeners.archive_callback(self.dst) - assert mock_send.call_count == 1 + assert self.mock_send_grid.call_count == 0 - @mock.patch('website.mails.send_mail') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archive_callback_done_embargoed(self, mock_send, mock_archive_success): + def test_archive_callback_done_embargoed(self, mock_archive_success): end_date = timezone.now() + datetime.timedelta(days=30) self.dst.archive_job.meta = { 'embargo_urls': { @@ -966,7 +930,7 @@ def test_archive_callback_done_embargoed(self, mock_send, mock_archive_success): self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) self.dst.save() listeners.archive_callback(self.dst) - assert mock_send.call_count == 1 + assert self.mock_send_grid.call_count == 0 def test_archive_callback_done_errors(self): self.dst.archive_job.update_target('osfstorage', ARCHIVER_FAILURE) @@ -1043,9 +1007,8 @@ def test_archive_tree_finished_false_for_partial_archive(self): rsibling.save() assert not reg.archive_job.archive_tree_finished() - @mock.patch('website.mails.send_mail') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archive_callback_on_tree_sends_only_one_email(self, mock_send_success, mock_arhive_success): + def test_archive_callback_on_tree_sends_only_one_email(self, mock_arhive_success): proj = factories.NodeFactory() child = factories.NodeFactory(parent=proj) factories.NodeFactory(parent=child) @@ -1059,16 +1022,15 @@ def test_archive_callback_on_tree_sends_only_one_email(self, mock_send_success, rchild.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) rchild.save() listeners.archive_callback(rchild) - assert not mock_send_success.called + assert not self.mock_send_grid.called reg.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) reg.save() listeners.archive_callback(reg) - assert not mock_send_success.called + assert not self.mock_send_grid.called rchild2.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) rchild2.save() listeners.archive_callback(rchild2) - assert mock_send_success.call_count == 1 - assert mock_send_success.called + assert not self.mock_send_grid.called class TestArchiverScripts(ArchiverTestCase): @@ -1116,8 +1078,14 @@ def test_find_failed_registrations(self): assert pk not in failed +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class 
TestArchiverBehavior(OsfTestCase): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('osf.models.AbstractNode.update_search') def test_archiving_registrations_not_added_to_search_before_archival(self, mock_update_search): proj = factories.ProjectFactory() @@ -1126,9 +1094,8 @@ def test_archiving_registrations_not_added_to_search_before_archival(self, mock_ assert not mock_update_search.called @mock.patch('osf.models.AbstractNode.update_search') - @mock.patch('website.mails.send_mail') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock_update_search, mock_send, mock_archive_success): + def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock_update_search, mock_archive_success): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) reg.save() @@ -1141,8 +1108,7 @@ def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock @pytest.mark.enable_search @mock.patch('website.search.elastic_search.delete_doc') - @mock.patch('website.mails.send_mail') - def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, mock_delete_index_node): + def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_delete_index_node): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj, archive=True) reg.save() @@ -1154,8 +1120,7 @@ def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, assert mock_delete_index_node.called @mock.patch('osf.models.AbstractNode.update_search') - @mock.patch('website.mails.send_mail') - def test_archiving_nodes_not_added_to_search_on_archive_incomplete(self, mock_send, mock_update_search): + def test_archiving_nodes_not_added_to_search_on_archive_incomplete(self, mock_update_search): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) reg.save() diff --git a/osf_tests/test_collection.py b/osf_tests/test_collection.py index d79e03a8323..c28dea3eb99 100644 --- a/osf_tests/test_collection.py +++ b/osf_tests/test_collection.py @@ -16,8 +16,6 @@ CollectionProviderFactory ) from osf.utils.workflows import CollectionSubmissionStates -from website.mails import mails -from osf.models.collection_submission import mails as collection_submission_mail pytestmark = pytest.mark.django_db @@ -73,6 +71,7 @@ def test_can_remove_root_folder_structure_without_cascading(self, user, auth): @pytest.mark.enable_bookmark_creation +@pytest.mark.usefixtures('mock_send_grid') class TestImplicitRemoval: @pytest.fixture @@ -127,32 +126,22 @@ def test_node_removed_from_collection_on_privacy_change(self, auth, collected_no assert associated_collections.filter(collection=bookmark_collection).exists() @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits - def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provider_collected_node, bookmark_collection): + def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provider_collected_node, bookmark_collection, mock_send_grid): associated_collections = provider_collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - 
provider_collected_node.set_privacy('private', auth=auth) - assert mock_send.called - assert len(mock_send.call_args_list) == 1 - email1 = mock_send.call_args_list[0] - _, email1_kwargs = email1 - assert {email1_kwargs['node'].id} == {provider_collected_node.id} - expected_mail = mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(associated_collections.last().collection, provider_collected_node) - assert {email1_kwargs['mail'].tpl_prefix} == {expected_mail.tpl_prefix} + mock_send_grid.reset_mock() + provider_collected_node.set_privacy('private', auth=auth) + assert mock_send_grid.called + assert len(mock_send_grid.call_args_list) == 1 @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits - def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection): + def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection, mock_send_grid): associated_collections = collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - collected_node.set_privacy('private', auth=auth) - assert not mock_send.called + collected_node.set_privacy('private', auth=auth) + assert not mock_send_grid.called def test_node_removed_from_collection_on_delete(self, collected_node, bookmark_collection, auth): associated_collections = collected_node.guids.first().collectionsubmission_set diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index 97ea2c8692a..2ff2b279a6b 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -13,9 +13,6 @@ from osf.utils.workflows import CollectionSubmissionStates from framework.exceptions import PermissionsError from api_tests.utils import UserRoles -from website.mails import mails -from osf_tests.utils import assert_notification_correctness -from osf.models.collection_submission import mails as collection_submission_mail from osf.management.commands.populate_collection_provider_notification_subscriptions import populate_collection_provider_notification_subscriptions from django.utils import timezone @@ -147,6 +144,7 @@ def configure_test_auth(node, user_role, provider=None): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestModeratedCollectionSubmission: MOCK_NOW = timezone.now() @@ -161,23 +159,15 @@ def test_submit(self, moderated_collection_submission): # .submit on post_save assert moderated_collection_submission.state == CollectionSubmissionStates.PENDING - def test_notify_contributors_pending(self, node, moderated_collection): - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - collection_submission = CollectionSubmission( - guid=node.guids.first(), - collection=moderated_collection, - creator=node.creator, - ) - collection_submission.save() - assert mock_send.called - assert collection_submission.state == CollectionSubmissionStates.PENDING - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_SUBMITTED(collection_submission.creator, node), - {user.username for user in node.contributors.all()} + def test_notify_contributors_pending(self, node, 
moderated_collection, mock_send_grid): + collection_submission = CollectionSubmission( + guid=node.guids.first(), + collection=moderated_collection, + creator=node.creator, ) + collection_submission.save() + assert mock_send_grid.called + assert collection_submission.state == CollectionSubmissionStates.PENDING def test_notify_moderators_pending(self, node, moderated_collection): from website.notifications import emails @@ -216,19 +206,11 @@ def test_accept_success(self, node, moderated_collection_submission): moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - def test_notify_moderated_accepted(self, node, moderated_collection_submission): + def test_notify_moderated_accepted(self, node, moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.accept(user=moderator, comment='Test Comment') - assert mock_send.called + moderated_collection_submission.accept(user=moderator, comment='Test Comment') + assert mock_send_grid.called assert moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_ACCEPTED(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) def test_reject_fails(self, node, user_role, moderated_collection_submission): @@ -242,20 +224,12 @@ def test_reject_success(self, node, moderated_collection_submission): moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - def test_notify_moderated_rejected(self, node, moderated_collection_submission): + def test_notify_moderated_rejected(self, node, moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.reject(user=moderator, comment='Test Comment') - assert mock_send.called - assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REJECTED(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + moderated_collection_submission.reject(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED @pytest.mark.parametrize('user_role', UserRoles.excluding(*[UserRoles.ADMIN_USER, UserRoles.MODERATOR])) def test_remove_fails(self, node, user_role, moderated_collection_submission): @@ -274,37 +248,21 @@ def test_remove_success(self, node, user_role, moderated_collection_submission): moderated_collection_submission.remove(user=user, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_moderator(self, node, moderated_collection_submission): + 
def test_notify_moderated_removed_moderator(self, node, moderated_collection_submission, mock_send_grid): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_MODERATOR(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, moderated_collection_submission): + def test_notify_moderated_removed_admin(self, node, moderated_collection_submission, mock_send_grid): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, moderated_collection_submission): user = configure_test_auth(node, UserRoles.ADMIN_USER) @@ -340,6 +298,7 @@ def test_cancel_succeeds(self, node, moderated_collection_submission): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUnmoderatedCollectionSubmission: def test_moderated_submit(self, unmoderated_collection_submission): @@ -377,21 +336,13 @@ def test_remove_success(self, user_role, node, unmoderated_collection_submission unmoderated_collection_submission.remove(user=user, comment='Test Comment') assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission): + def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission, mock_send_grid): unmoderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - 
mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(unmoderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, unmoderated_collection_submission): user = configure_test_auth(node, UserRoles.ADMIN_USER) @@ -427,6 +378,7 @@ def test_cancel_succeeds(self, node, unmoderated_collection_submission): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestHybridModeratedCollectionSubmission: @pytest.mark.parametrize('user_role', UserRoles.excluding(UserRoles.MODERATOR)) @@ -482,20 +434,12 @@ def test_accept_success(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_ACCEPTED(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) def test_reject_fails(self, node, user_role, hybrid_moderated_collection_submission): @@ -509,20 +453,12 @@ def test_reject_success(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REJECTED(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert 
hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED @pytest.mark.parametrize('user_role', UserRoles.excluding(*[UserRoles.ADMIN_USER, UserRoles.MODERATOR])) def test_remove_fails(self, node, user_role, hybrid_moderated_collection_submission): @@ -541,37 +477,21 @@ def test_remove_success(self, node, user_role, hybrid_moderated_collection_submi hybrid_moderated_collection_submission.remove(user=user, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collection_submission, mock_send_grid): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_MODERATOR(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission, mock_send_grid): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, hybrid_moderated_collection_submission): user = configure_test_auth(node, UserRoles.ADMIN_USER) diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index 878b12962d9..7f247d403d5 100644 --- a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -14,7 +14,6 @@ from website import settings from addons.osfstorage import settings as osfstorage_settings from website.project.views.comment import update_file_guid_referent -from website.project.signals import comment_added, mention_added from framework.exceptions import PermissionsError from tests.base import capture_signals from osf.models import 
Comment, NodeLog, Guid, BaseFileNode @@ -27,7 +26,6 @@ UserFactory, UnregUserFactory, AuthUserFactory, - OSFGroupFactory, ) # All tests will require a databse @@ -220,36 +218,12 @@ class TestCommentModel: ] create_cases = [ - # Make sure valid mentions send signals - { - 'comment_content': comment_mention_valid, - 'expected_signals': {comment_added, mention_added}, - 'expected_error_msg': None, - }, - # User mentions a contributor - { - 'comment_content': comment_contributor_mentioned, - 'expected_signals': {comment_added, mention_added}, - 'expected_error_msg': None, - }, # Make sure comments aren't NoneType { 'comment_content': None, 'expected_signals': set(), 'expected_error_msg': "{'content': ['This field cannot be null.']}", }, - # User makes valid comment - { - 'comment_content': comment_valid, - 'expected_signals': {comment_added}, - 'expected_error_msg': None, - }, - # User mentions themselves - { - 'comment_content': comment_self_mentioned, - 'expected_signals': {comment_added, mention_added}, - 'expected_error_msg': None, - }, # Prevent user from entering a comment that's too long with a mention { 'comment_content': comment_too_long_with_mention, @@ -258,41 +232,17 @@ class TestCommentModel: }, ] edit_cases = [ - # Send if mention is valid - { - 'comment_content': comment_mention_valid, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, - }, - # User mentions a contributor - { - 'comment_content': comment_contributor_mentioned, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, - }, # User edits valid comment { 'comment_content': comment_valid, 'expected_signals': set(), 'expected_error_msg': None, }, - # User mentions themselves - { - 'comment_content': comment_self_mentioned, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, - }, # Don't send mention if already mentioned { 'comment_content': comment_mention_edited_twice, 'expected_signals': set(), 'expected_error_msg': None, - }, - # Send mention if already mentioned - { - 'comment_content': comment_mention_project_with_contributor, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, } ] params = { @@ -353,21 +303,6 @@ def test_edit(self): assert comment.node.logs.count() == 2 assert comment.node.logs.latest().action == NodeLog.COMMENT_UPDATED - def test_create_sends_mention_added_signal_if_group_member_mentions(self, node, user, auth): - manager = AuthUserFactory() - group = OSFGroupFactory(creator=manager) - node.add_osf_group(group) - assert node.is_contributor_or_group_member(manager) is True - with capture_signals() as mock_signals: - Comment.create( - auth=auth, - user=user, - node=node, - target=node.guids.all()[0], - content='This is a comment with a group member mention [@Group Member](http://localhost:5000/' + manager._id + '/).' 
- ) - assert mock_signals.signals_sent() == ({comment_added, mention_added}) - def test_delete(self, node): comment = CommentFactory(node=node) auth = Auth(comment.user) diff --git a/osf_tests/test_draft_registration.py b/osf_tests/test_draft_registration.py index f7beb3ceae8..c5b38632230 100644 --- a/osf_tests/test_draft_registration.py +++ b/osf_tests/test_draft_registration.py @@ -68,17 +68,6 @@ def test_register(self): draft.register(auth) assert draft.registered_node - # group member with admin access cannot register - member = factories.AuthUserFactory() - osf_group = factories.OSFGroupFactory(creator=user) - osf_group.make_member(member, auth=auth) - project.add_osf_group(osf_group, ADMIN) - draft_2 = factories.DraftRegistrationFactory(branched_from=project) - assert project.has_permission(member, ADMIN) - with pytest.raises(PermissionsError): - draft_2.register(Auth(member)) - assert not draft_2.registered_node - @mock.patch('website.settings.ENABLE_ARCHIVER', False) def test_register_no_title_fails(self): user = factories.UserFactory() @@ -198,9 +187,7 @@ def test_create_from_node_existing(self, user): node = factories.ProjectFactory(creator=user) member = factories.AuthUserFactory() - osf_group = factories.OSFGroupFactory(creator=user) - osf_group.make_member(member, auth=Auth(user)) - node.add_osf_group(osf_group, ADMIN) + node.add_contributor(member, permissions=ADMIN) write_contrib = factories.AuthUserFactory() subject = factories.SubjectFactory() @@ -246,8 +233,6 @@ def test_create_from_node_existing(self, user): assert draft.category == category assert user in draft.contributors.all() assert write_contrib in draft.contributors.all() - assert member not in draft.contributors.all() - assert not draft.has_permission(member, 'read') assert draft.get_permissions(user) == [READ, WRITE, ADMIN] assert draft.get_permissions(write_contrib) == [READ, WRITE] diff --git a/osf_tests/test_elastic_search.py b/osf_tests/test_elastic_search.py index 56c42391095..396e0d6b2aa 100644 --- a/osf_tests/test_elastic_search.py +++ b/osf_tests/test_elastic_search.py @@ -15,7 +15,6 @@ from osf.models import ( Retraction, NodeLicense, - OSFGroup, Tag, Preprint, ) @@ -443,77 +442,6 @@ def test_make_public(self): assert len(docs) == 1 -@pytest.mark.enable_search -@pytest.mark.enable_enqueue_task -class TestOSFGroup(OsfTestCase): - - def setUp(self): - with run_celery_tasks(): - super().setUp() - search.delete_index(elastic_search.INDEX) - search.create_index(elastic_search.INDEX) - self.user = factories.UserFactory(fullname='John Deacon') - self.user_two = factories.UserFactory(fullname='Grapes McGee') - self.group = OSFGroup( - name='Cornbread', - creator=self.user, - ) - self.group.save() - self.project = factories.ProjectFactory(is_public=True, creator=self.user, title='Biscuits') - self.project.save() - - def test_create_osf_group(self): - title = 'Butter' - group = OSFGroup(name=title, creator=self.user) - group.save() - docs = query(title)['results'] - assert len(docs) == 1 - - def test_set_group_name(self): - title = 'Eggs' - self.group.set_group_name(title) - self.group.save() - docs = query(title)['results'] - assert len(docs) == 1 - - docs = query('Cornbread')['results'] - assert len(docs) == 0 - - def test_add_member(self): - self.group.make_member(self.user_two) - docs = query(f'category:group AND "{self.user_two.fullname}"')['results'] - assert len(docs) == 1 - - self.group.make_manager(self.user_two) - docs = query(f'category:group AND "{self.user_two.fullname}"')['results'] - assert 
len(docs) == 1 - - self.group.remove_member(self.user_two) - docs = query(f'category:group AND "{self.user_two.fullname}"')['results'] - assert len(docs) == 0 - - def test_connect_to_node(self): - self.project.add_osf_group(self.group) - docs = query(f'category:project AND "{self.group.name}"')['results'] - assert len(docs) == 1 - - self.project.remove_osf_group(self.group) - docs = query(f'category:project AND "{self.group.name}"')['results'] - assert len(docs) == 0 - - def test_remove_group(self): - group_name = self.group.name - self.project.add_osf_group(self.group) - docs = query(f'category:project AND "{group_name}"')['results'] - assert len(docs) == 1 - - self.group.remove_group() - docs = query(f'category:project AND "{group_name}"')['results'] - assert len(docs) == 0 - docs = query(group_name)['results'] - assert len(docs) == 0 - - @pytest.mark.enable_search @pytest.mark.enable_enqueue_task class TestPreprint(OsfTestCase): diff --git a/osf_tests/test_institution.py b/osf_tests/test_institution.py index 77bf32377b2..eca6737b6e5 100644 --- a/osf_tests/test_institution.py +++ b/osf_tests/test_institution.py @@ -12,7 +12,6 @@ RegionFactory, UserFactory, ) -from website import mails, settings @pytest.mark.django_db @@ -110,6 +109,7 @@ def test_non_group_member_doesnt_have_perms(self, institution, user): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestInstitutionManager: def test_deactivated_institution_not_in_default_queryset(self): @@ -146,9 +146,7 @@ def test_reactivate_institution(self): institution.reactivate() assert institution.deactivated is None - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) - def test_send_deactivation_email_call_count(self, mock_send_mail): + def test_send_deactivation_email_call_count(self, mock_send_grid): institution = InstitutionFactory() user_1 = UserFactory() user_1.add_or_update_affiliated_institution(institution) @@ -157,24 +155,15 @@ def test_send_deactivation_email_call_count(self, mock_send_mail): user_2.add_or_update_affiliated_institution(institution) user_2.save() institution._send_deactivation_email() - assert mock_send_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) - def test_send_deactivation_email_call_args(self, mock_send_mail): + def test_send_deactivation_email_call_args(self, mock_send_grid): institution = InstitutionFactory() user = UserFactory() user.add_or_update_affiliated_institution(institution) user.save() institution._send_deactivation_email() - forgot_password = 'forgotpassword' if settings.DOMAIN.endswith('/') else '/forgotpassword' - mock_send_mail.assert_called_with( - to_addr=user.username, - mail=mails.INSTITUTION_DEACTIVATION, - user=user, - forgot_password_link=f'{settings.DOMAIN}{forgot_password}', - osf_support_email=settings.OSF_SUPPORT_EMAIL - ) + mock_send_grid.assert_called() def test_deactivate_inactive_institution_noop(self): institution = InstitutionFactory() diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index 0bb124c4f13..ee13c7bc107 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -24,18 +24,22 @@ from tests.utils import run_celery_tasks from waffle.testutils import override_flag from osf.features import ENABLE_GV +from conftest import start_mock_send_grid 
SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @pytest.mark.enable_implicit_clean @pytest.mark.enable_bookmark_creation +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestUserMerging(OsfTestCase): def setUp(self): super().setUp() self.user = UserFactory() with self.context: handlers.celery_before_request() + self.mock_send_grid = start_mock_send_grid(self) def _add_unconfirmed_user(self): self.unconfirmed = UnconfirmedUserFactory() @@ -286,12 +290,11 @@ def test_merge_unregistered(self): assert self.user.is_invited is True assert self.user in self.project_with_unreg_contrib.contributors - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_merge_doesnt_send_signal(self, mock_notify): + def test_merge_doesnt_send_signal(self): #Explictly reconnect signal as it is disconnected by default for test contributor_added.connect(notify_added_contributor) other_user = UserFactory() with override_flag(ENABLE_GV, active=True): self.user.merge_user(other_user) assert other_user.merged_by._id == self.user._id - assert mock_notify.called is False + assert self.mock_send_grid.called is False diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index d397c7099f0..c5e25b4b30e 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -60,7 +60,6 @@ InstitutionFactory, SubjectFactory, TagFactory, - OSFGroupFactory, CollectionFactory, CollectionProviderFactory, ) @@ -891,23 +890,6 @@ def test_add_contributor(self, node, user, auth): assert user2 in user.recently_added.all() - def test_add_contributor_already_group_member(self, node, user, auth): - group = OSFGroupFactory(creator=user) - user2 = UserFactory() - group.make_member(user2) - node.add_osf_group(group, permissions.ADMIN) - - assert node.is_contributor_or_group_member(user2) is True - assert node.is_contributor(user2) is False - assert node.has_permission(user2, permissions.ADMIN) - - node.add_contributor(contributor=user2, auth=auth) - node.save() - assert node.is_contributor(user2) is True - assert node.has_permission(user2, permissions.ADMIN) - # Even though user2 has admin perms, they don't have it through admin contributorship - assert node.is_admin_contributor(user2) is False - def test_add_contributors(self, node, auth): user1 = UserFactory() user2 = UserFactory() @@ -981,11 +963,6 @@ def test_is_contributor(self, node): assert node.is_contributor(noncontrib) is False assert node.is_contributor(None) is False - group = OSFGroupFactory(creator=noncontrib) - node.add_osf_group(group, permissions.READ) - assert node.is_contributor(noncontrib) is False - assert node.is_contributor_or_group_member(noncontrib) is True - superuser = AuthUserFactory() superuser.is_superuser = True superuser.save() @@ -1003,11 +980,6 @@ def test_is_admin_contributor(self, node): node.set_permissions(contrib, WRITE) - group = OSFGroupFactory(creator=contrib) - node.add_osf_group(group, permissions.ADMIN) - assert node.has_permission(contrib, permissions.ADMIN) is True - assert node.is_admin_contributor(contrib) is False - def test_visible_contributor_ids(self, node, user): visible_contrib = UserFactory() invisible_contrib = UserFactory() @@ -1065,14 +1037,6 @@ def test_set_visible_missing(self, node): with pytest.raises(ValueError): node.set_visible(UserFactory(), True) - def test_set_visible_group_member(self, node, user): - user2 = AuthUserFactory() - group = OSFGroupFactory(creator=user2) - node.add_osf_group(group, 
permissions.ADMIN) - - with pytest.raises(ValueError): - node.set_visible(user2, True) - def test_copy_contributors_from_adds_contributors(self, node): contrib, contrib2 = UserFactory(), UserFactory() node.add_contributor(contrib, visible=True) @@ -1100,15 +1064,12 @@ def test_copy_contributors_from_preserves_visibility(self, node): def test_copy_contributors_from_preserves_permissions(self, node): read, admin = UserFactory(), UserFactory() - group = OSFGroupFactory(creator=read) node.add_contributor(read, permissions.READ, visible=True) node.add_contributor(admin, permissions.ADMIN, visible=False) - node.add_osf_group(group, permissions.WRITE) node2 = NodeFactory() node2.copy_contributors_from(node) assert node2.has_permission(read, permissions.READ) is True - assert node2.has_permission(read, permissions.WRITE) is False assert node2.has_permission(admin, permissions.ADMIN) is True def test_remove_contributor(self, node, auth): @@ -1126,19 +1087,6 @@ def test_remove_contributor(self, node, auth): assert node.logs.latest().action == 'contributor_removed' assert node.logs.latest().params['contributors'] == [user2._id] - def test_remove_contributor_admin_group_members(self, node, user, auth): - user2 = UserFactory() - group = OSFGroupFactory(creator=user2) - node.add_osf_group(group, permissions.ADMIN) - assert node.has_permission(user2, permissions.ADMIN) is True - - removed = node.remove_contributor(contributor=user, auth=auth) - assert removed is False - # Contributor could not be removed even though there was another - # user with admin perms - group membership insufficient - assert node.has_permission(user, permissions.ADMIN) is True - assert node.is_contributor(user) is True - def test_remove_contributors(self, node, auth): user1 = UserFactory() user2 = UserFactory() @@ -1342,12 +1290,6 @@ def test_parent_admin_contributors(self, user): child_two = ProjectFactory(parent=project, creator=user_two) assert child_two.parent_admin_contributors.count() == 1 - user_three = UserFactory() - group = OSFGroupFactory(name='Platform', creator=user_three) - project.add_osf_group(group, permissions.ADMIN) - assert child_two.parent_admin_contributors.count() == 1 - assert child_two.parent_admin_users.count() == 2 - def test_admin_contributor_or_group_member_ids(self, user): project = ProjectFactory(creator=user) assert project.admin_contributor_or_group_member_ids == {user._id} @@ -1362,21 +1304,6 @@ def test_admin_contributor_or_group_member_ids(self, user): assert child1.admin_contributor_or_group_member_ids == {child1.creator._id, admin._id} assert child2.admin_contributor_or_group_member_ids == {child2.creator._id, child1.creator._id, admin._id} - # OSFGroup added with write perms - group_member = UserFactory() - group = OSFGroupFactory(creator=group_member) - project.add_osf_group(group, permissions.WRITE) - project.save() - assert child1.admin_contributor_or_group_member_ids == {child1.creator._id, admin._id} - assert child2.admin_contributor_or_group_member_ids == {child2.creator._id, child1.creator._id, admin._id} - - # OSFGroup updated to admin perms - project.update_osf_group(group, permissions.ADMIN) - project.save() - assert child1.admin_contributor_or_group_member_ids == {child1.creator._id, admin._id, group_member._id} - assert child2.admin_contributor_or_group_member_ids == {child2.creator._id, child1.creator._id, admin._id, group_member._id} - - class TestContributorAddedSignal: # Override disconnected signals from conftest @@ -1384,8 +1311,7 @@ class TestContributorAddedSignal: def 
disconnected_signals(self): return None - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_add_contributors_sends_contributor_added_signal(self, mock_send_mail, node, auth): + def test_add_contributors_sends_contributor_added_signal(self, node, auth): user = UserFactory() contributors = [{ 'user': user, @@ -1546,11 +1472,6 @@ def test_set_permissions(self, node, user): ) node.save() - with pytest.raises(NodeStateError): - node.set_permissions(user, WRITE) - - group = OSFGroupFactory(creator=user) - node.add_osf_group(group, ADMIN) with pytest.raises(NodeStateError): node.set_permissions(user, WRITE) @@ -1559,20 +1480,6 @@ def test_set_permissions(self, node, user): assert node.has_permission(high, permissions.WRITE) is True assert node.has_permission(high, permissions.ADMIN) is True - def test_set_permissions_raises_error_if_only_admins_permissions_are_reduced(self, node): - # creator is the only admin - with pytest.raises(NodeStateError) as excinfo: - node.set_permissions(node.creator, permissions=WRITE) - assert excinfo.value.args[0] == 'Must have at least one registered admin contributor' - - new_user = AuthUserFactory() - osf_group = OSFGroupFactory(creator=new_user) - node.add_osf_group(osf_group, permissions.ADMIN) - # A group member being added as a contributor doesn't throw any errors, even if that - # group member is being downgraded to write. Group members don't count towards - # the one registered admin contributor tally - node.set_permissions(new_user, permissions.WRITE) - def test_add_permission_with_admin_also_grants_read_and_write(self, node): user = UserFactory() Contributor.objects.create( @@ -1936,24 +1843,6 @@ def test_register_node_copies_contributors_from_draft_registration(self, mock_si assert registration.has_permission(draft_reg_user, permissions.WRITE) is True assert registration.has_permission(node_user, permissions.WRITE) is False - @mock.patch('website.project.signals.after_create_registration') - def test_register_node_does_not_copy_group_members(self, mock_signal): - user = UserFactory() - node = NodeFactory(creator=user) - - group_mem = UserFactory() - group = OSFGroupFactory(creator=group_mem) - node.add_osf_group(group, permissions.READ) - node.save() - - assert node.has_permission(group_mem, permissions.READ) is True - - draft_reg = DraftRegistrationFactory(branched_from=node) - registration = node.register_node(get_default_metaschema(), Auth(user), draft_reg, None) - - assert registration.has_permission(user, permissions.ADMIN) is True - assert registration.has_permission(group_mem, permissions.READ) is False - @mock.patch('website.project.signals.after_create_registration') def test_register_node_makes_private_registration(self, mock_signal): user = UserFactory() @@ -2152,25 +2041,6 @@ def test_add_unregistered_raises_error_if_user_is_registered(self, node, auth): auth=auth ) - def test_add_unregistered_contributor_already_group_member(self, node, user, auth): - given_name = 'Grapes McGee' - username = 'fake@cos.io' - group = OSFGroupFactory(creator=user) - unreg_user = group.add_unregistered_member(given_name, username, auth=Auth(user)) - assert unreg_user.get_unclaimed_record(group._id)['email'] == username - - node.add_osf_group(group, permissions.ADMIN) - - node.add_unregistered_contributor( - email=username, - fullname=given_name, - auth=auth - ) - node.save - unreg_user.reload() - unclaimed_data = unreg_user.get_unclaimed_record(node._primary_key) - assert unclaimed_data['email'] == username - def 
test_find_by_institutions(): inst1, inst2 = InstitutionFactory(), InstitutionFactory() project = ProjectFactory(is_public=True) @@ -2203,11 +2073,6 @@ def test_can_comment(): noncontrib = UserFactory() assert private_node.can_comment(Auth(noncontrib)) is False - group_mem = UserFactory() - group = OSFGroupFactory(creator=group_mem) - private_node.add_osf_group(group, permissions.READ) - assert private_node.can_comment(Auth(group_mem)) is True - def test_parent_kwarg(): parent = NodeFactory() child = NodeFactory(parent=parent) @@ -2356,11 +2221,10 @@ def test_check_spam_on_private_node(self, project, user): assert not project.is_public @pytest.mark.enable_enqueue_task - @mock.patch('website.mails.send_mail') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) @pytest.mark.skip('Technically still true, but skipping because mocking is outdated') - def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, project, user): + def test_check_spam_on_private_node_bans_new_spam_user(self, project, user): project.is_public = False project.save() with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): @@ -2387,10 +2251,9 @@ def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, pro project3.reload() assert project3.is_public is True - @mock.patch('website.mails.send_mail') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) - def test_check_spam_on_private_node_does_not_ban_existing_user(self, mock_send_mail, project, user): + def test_check_spam_on_private_node_does_not_ban_existing_user(self, project, user): project.is_public = False project.save() with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): @@ -2721,46 +2584,22 @@ def test_manage_contributors_no_admins(self, node, auth): users, auth=auth, save=True, ) - def test_manage_contributors_no_registered_admins(self, node, auth): - unregistered = UnregUserFactory() - node.add_unregistered_contributor( - unregistered.fullname, - unregistered.email, - auth=Auth(node.creator), - permissions=ADMIN, - existing_user=unregistered - ) - users = [ - {'id': node.creator._id, 'permission': READ, 'visible': True}, - {'id': unregistered._id, 'permission': ADMIN, 'visible': True}, - ] - - group = OSFGroupFactory(creator=node.creator) - node.add_osf_group(group, permissions.ADMIN) - with pytest.raises(NodeStateError): - node.manage_contributors( - users, auth=auth, save=True, - ) - def test_get_admin_contributors(self, user, auth): read, write, admin = UserFactory(), UserFactory(), UserFactory() nonactive_admin = UserFactory() noncontrib = UserFactory() - group_member = UserFactory() - group = OSFGroupFactory(creator=group_member) project = ProjectFactory(creator=user) project.add_contributor(read, auth=auth, permissions=READ) project.add_contributor(write, auth=auth, permissions=WRITE) project.add_contributor(admin, auth=auth, permissions=ADMIN) project.add_contributor(nonactive_admin, auth=auth, permissions=ADMIN) - project.add_osf_group(group, permissions.ADMIN) project.save() nonactive_admin.is_disabled = True nonactive_admin.save() result = list(project.get_admin_contributors([ - read, write, admin, noncontrib, nonactive_admin, group_member + read, write, admin, noncontrib, nonactive_admin ])) assert admin in result @@ -2768,7 +2607,6 @@ def 
test_get_admin_contributors(self, user, auth): assert write not in result assert noncontrib not in result assert nonactive_admin not in result - assert group_member not in result # copied from tests/test_models.py class TestNodeTraversals: diff --git a/osf_tests/test_osfgroup.py b/osf_tests/test_osfgroup.py deleted file mode 100644 index 722cffd7bc7..00000000000 --- a/osf_tests/test_osfgroup.py +++ /dev/null @@ -1,1124 +0,0 @@ -from unittest import mock -import pytest -import time -from django.contrib.auth.models import Group -from django.core.exceptions import ValidationError - -from addons.github.tests import factories -from addons.osfstorage.models import OsfStorageFile -from framework.auth import Auth -from django.contrib.auth.models import AnonymousUser -from django.contrib.contenttypes.models import ContentType -from framework.exceptions import PermissionsError -from osf.models import OSFGroup, Node, OSFUser, OSFGroupLog, NodeLog -from osf.utils.permissions import MANAGER, MEMBER, MANAGE, READ, WRITE, ADMIN -from website.notifications.utils import get_all_node_subscriptions -from website.osf_groups import signals as group_signals -from .factories import ( - NodeFactory, - ProjectFactory, - AuthUserFactory, - OSFGroupFactory -) - -pytestmark = pytest.mark.django_db - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def user_two(): - return AuthUserFactory() - -@pytest.fixture() -def user_three(): - return AuthUserFactory() - -@pytest.fixture() -def auth(manager): - return Auth(manager) - -@pytest.fixture() -def project(manager): - return ProjectFactory(creator=manager) - -@pytest.fixture() -def osf_group(manager, member): - osf_group = OSFGroupFactory(creator=manager) - osf_group.make_member(member) - return osf_group - -class TestOSFGroup: - - def test_osf_group_creation(self, manager, member, user_two, fake): - osf_group = OSFGroup.objects.create(name=fake.bs(), creator=manager) - # OSFGroup creator given manage permissions - assert osf_group.has_permission(manager, MANAGE) is True - assert osf_group.has_permission(user_two, MANAGE) is False - - assert manager in osf_group.managers - assert manager in osf_group.members - assert manager not in osf_group.members_only - - user_two.is_superuser = True - user_two.save() - - # Superusers don't have permission to group - assert osf_group.has_permission(user_two, MEMBER) is False - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_make_manager(self, mock_send_mail, manager, member, user_two, user_three, osf_group): - # no permissions - with pytest.raises(PermissionsError): - osf_group.make_manager(user_two, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.make_manager(user_two, Auth(member)) - - # manage permissions - osf_group.make_manager(user_two, Auth(manager)) - assert osf_group.has_permission(user_two, MANAGE) is True - assert user_two in osf_group.managers - assert user_two in osf_group.members - assert mock_send_mail.call_count == 1 - - # upgrade to manager - osf_group.make_manager(member, Auth(manager)) - assert osf_group.has_permission(member, MANAGE) is True - assert member in osf_group.managers - assert member in osf_group.members - # upgrading an existing member does not re-send an email - assert mock_send_mail.call_count == 1 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_make_member(self, 
mock_send_mail, manager, member, user_two, user_three, osf_group): - # no permissions - with pytest.raises(PermissionsError): - osf_group.make_member(user_two, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.make_member(user_two, Auth(member)) - - # manage permissions - osf_group.make_member(user_two, Auth(manager)) - assert osf_group.has_permission(user_two, MANAGE) is False - assert user_two not in osf_group.managers - assert user_two in osf_group.members - assert mock_send_mail.call_count == 1 - - # downgrade to member, sole manager - with pytest.raises(ValueError): - osf_group.make_member(manager, Auth(manager)) - - # downgrade to member - osf_group.make_manager(user_two, Auth(manager)) - assert user_two in osf_group.managers - assert user_two in osf_group.members - osf_group.make_member(user_two, Auth(manager)) - assert user_two not in osf_group.managers - assert user_two in osf_group.members - assert mock_send_mail.call_count == 1 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_unregistered_member(self, mock_send_mail, manager, member, osf_group, user_two): - test_fullname = 'Test User' - test_email = 'test_member@cos.io' - test_manager_email = 'test_manager@cos.io' - - # Email already exists - with pytest.raises(ValueError): - osf_group.add_unregistered_member(test_fullname, user_two.username, auth=Auth(manager)) - - # Test need manager perms to add - with pytest.raises(PermissionsError): - osf_group.add_unregistered_member(test_fullname, test_email, auth=Auth(member)) - - # Add member - osf_group.add_unregistered_member(test_fullname, test_email, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - unreg_user = OSFUser.objects.get(username=test_email) - assert unreg_user in osf_group.members - assert unreg_user not in osf_group.managers - assert osf_group.has_permission(unreg_user, MEMBER) is True - assert osf_group._id in unreg_user.unclaimed_records - - # Attempt to add unreg user as a member - with pytest.raises(ValueError): - osf_group.add_unregistered_member(test_fullname, test_email, auth=Auth(manager)) - - # Add unregistered manager - osf_group.add_unregistered_member(test_fullname, test_manager_email, auth=Auth(manager), role=MANAGER) - assert mock_send_mail.call_count == 2 - unreg_manager = OSFUser.objects.get(username=test_manager_email) - assert unreg_manager in osf_group.members - assert unreg_manager in osf_group.managers - assert osf_group.has_permission(unreg_manager, MEMBER) is True - assert osf_group._id in unreg_manager.unclaimed_records - - # Add unregistered member with blocked email - with pytest.raises(ValidationError): - osf_group.add_unregistered_member(test_fullname, 'test@example.com', auth=Auth(manager), role=MANAGER) - - def test_remove_member(self, manager, member, user_three, osf_group): - new_member = AuthUserFactory() - osf_group.make_member(new_member) - assert new_member not in osf_group.managers - assert new_member in osf_group.members - - # no permissions - with pytest.raises(PermissionsError): - osf_group.remove_member(new_member, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.remove_member(new_member, Auth(member)) - - # manage permissions - osf_group.remove_member(new_member, Auth(manager)) - assert new_member not in osf_group.managers - assert new_member not in osf_group.members - - # Remove self - member can remove themselves - osf_group.remove_member(member, Auth(member)) - assert member not in osf_group.managers - assert 
member not in osf_group.members - - def test_remove_manager(self, manager, member, user_three, osf_group): - new_manager = AuthUserFactory() - osf_group.make_manager(new_manager) - # no permissions - with pytest.raises(PermissionsError): - osf_group.remove_member(new_manager, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.remove_member(new_manager, Auth(member)) - - # manage permissions - osf_group.remove_member(new_manager, Auth(manager)) - assert new_manager not in osf_group.managers - assert new_manager not in osf_group.members - - # can't remove last manager - with pytest.raises(ValueError): - osf_group.remove_member(manager, Auth(manager)) - assert manager in osf_group.managers - assert manager in osf_group.members - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_notify_group_member_email_does_not_send_before_throttle_expires(self, mock_send_mail, manager, osf_group): - member = AuthUserFactory() - assert member.member_added_email_records == {} - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - - record = member.member_added_email_records[osf_group._id] - assert record is not None - # 2nd call does not send email because throttle period has not expired - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager)) - assert member.member_added_email_records[osf_group._id] == record - assert mock_send_mail.call_count == 1 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_notify_group_member_email_sends_after_throttle_expires(self, mock_send_mail, osf_group, member, manager): - throttle = 0.5 - - member = AuthUserFactory() - assert member.member_added_email_records == {} - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 1 - - time.sleep(1) # throttle period expires - # 2nd call does not send email because throttle period has not expired - assert member.member_added_email_records[osf_group._id] is not None - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 2 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_notify_group_unregistered_member_throttle(self, mock_send_mail, osf_group, member, manager): - throttle = 0.5 - - member = AuthUserFactory() - member.is_registered = False - member.add_unclaimed_record(osf_group, referrer=manager, given_name='grapes mcgee', email='grapes@cos.io') - member.save() - assert member.member_added_email_records == {} - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 1 - - assert member.member_added_email_records[osf_group._id] is not None - # 2nd call does not send email because throttle period has not expired - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - - time.sleep(1) # throttle period expires - # 2nd call does not send email because throttle period has not expired - assert member.member_added_email_records[osf_group._id] is not None - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 2 - - def test_rename_osf_group(self, manager, member, 
user_two, osf_group): - new_name = 'Platform Team' - # no permissions - with pytest.raises(PermissionsError): - osf_group.set_group_name(new_name, Auth(user_two)) - - # member only - with pytest.raises(PermissionsError): - osf_group.set_group_name(new_name, Auth(member)) - - # manage permissions - osf_group.set_group_name(new_name, Auth(manager)) - osf_group.save() - - assert osf_group.name == new_name - - def test_remove_group(self, manager, member, osf_group): - osf_group_name = osf_group.name - manager_group_name = osf_group.manager_group.name - member_group_name = osf_group.member_group.name - - osf_group.remove_group(Auth(manager)) - assert not OSFGroup.objects.filter(name=osf_group_name).exists() - assert not Group.objects.filter(name=manager_group_name).exists() - assert not Group.objects.filter(name=member_group_name).exists() - - assert manager_group_name not in manager.groups.values_list('name', flat=True) - - def test_remove_group_node_perms(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, ADMIN) - assert project.has_permission(member, ADMIN) is True - - osf_group.remove_group(Auth(manager)) - - assert project.has_permission(member, ADMIN) is False - - def test_user_groups_property(self, manager, member, osf_group): - assert osf_group in manager.osf_groups - assert osf_group in member.osf_groups - - other_group = OSFGroupFactory() - - assert other_group not in manager.osf_groups - assert other_group not in member.osf_groups - - def test_user_group_roles(self, manager, member, user_three, osf_group): - assert manager.group_role(osf_group) == MANAGER - assert member.group_role(osf_group) == MEMBER - assert user_three.group_role(osf_group) is None - - def test_replace_contributor(self, manager, member, osf_group): - user = osf_group.add_unregistered_member('test_user', 'test@cos.io', auth=Auth(manager)) - assert user in osf_group.members - assert user not in osf_group.managers - assert ( - osf_group._id in - user.unclaimed_records.keys() - ) - osf_group.replace_contributor(user, member) - assert user not in osf_group.members - assert user not in osf_group.managers - assert osf_group.has_permission(member, MEMBER) is True - assert osf_group.has_permission(user, MEMBER) is False - - # test unclaimed_records is removed - assert ( - osf_group._id not in - user.unclaimed_records.keys() - ) - - def test_get_users_with_perm_osf_groups(self, project, manager, member, osf_group): - # Explicitly added as a contributor - read_users = project.get_users_with_perm(READ) - write_users = project.get_users_with_perm(WRITE) - admin_users = project.get_users_with_perm(ADMIN) - assert len(project.get_users_with_perm(READ)) == 1 - assert len(project.get_users_with_perm(WRITE)) == 1 - assert len(project.get_users_with_perm(ADMIN)) == 1 - assert manager in read_users - assert manager in write_users - assert manager in admin_users - - # Added through osf groups - project.add_osf_group(osf_group, WRITE) - read_users = project.get_users_with_perm(READ) - write_users = project.get_users_with_perm(WRITE) - admin_users = project.get_users_with_perm(ADMIN) - assert len(project.get_users_with_perm(READ)) == 2 - assert len(project.get_users_with_perm(WRITE)) == 2 - assert len(project.get_users_with_perm(ADMIN)) == 1 - assert member in read_users - assert member in write_users - assert member not in admin_users - - def test_merge_users_transfers_group_membership(self, member, manager, osf_group): - # merge member - other_user = AuthUserFactory() - other_user.merge_user(member) - 
other_user.save() - assert osf_group.is_member(other_user) - - # merge manager - other_other_user = AuthUserFactory() - other_other_user.merge_user(manager) - other_other_user.save() - assert osf_group.is_member(other_other_user) - assert osf_group.has_permission(other_other_user, MANAGE) - - def test_merge_users_already_group_manager(self, member, manager, osf_group): - # merge users - both users have group membership - different roles - manager.merge_user(member) - manager.save() - assert osf_group.has_permission(manager, MANAGE) - assert osf_group.is_member(member) is False - - def test_osf_group_is_admin_parent(self, project, manager, member, osf_group, user_two, user_three): - child = NodeFactory(parent=project, creator=manager) - assert project.is_admin_parent(manager) is True - assert project.is_admin_parent(member) is False - - project.add_contributor(user_two, WRITE, save=True) - assert project.is_admin_parent(user_two) is False - - assert child.is_admin_parent(manager) is True - child.add_contributor(user_two, ADMIN, save=True) - assert child.is_admin_parent(user_two) is True - - assert child.is_admin_parent(user_three) is False - osf_group.make_member(user_three) - project.add_osf_group(osf_group, WRITE) - assert child.is_admin_parent(user_three) is False - - project.update_osf_group(osf_group, ADMIN) - assert child.is_admin_parent(user_three) is True - assert child.is_admin_parent(user_three, include_group_admin=False) is False - project.remove_osf_group(osf_group) - - child.add_osf_group(osf_group, WRITE) - assert child.is_admin_parent(user_three) is False - child.update_osf_group(osf_group, ADMIN) - assert child.is_admin_parent(user_three) is True - assert child.is_admin_parent(user_three, include_group_admin=False) is False - - -class TestNodeGroups: - def test_node_contributors_and_group_members(self, manager, member, osf_group, project, user, user_two): - assert project.contributors_and_group_members.count() == 1 - project.add_osf_group(osf_group, ADMIN) - assert project.contributors_and_group_members.count() == 2 - project.add_contributor(user, WRITE) - project.add_contributor(user_two, READ) - project.save() - assert project.contributors_and_group_members.count() == 4 - - def test_add_osf_group_to_node_already_connected(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, ADMIN) - assert project.has_permission(member, ADMIN) is True - - project.add_osf_group(osf_group, WRITE) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - - def test_osf_group_nodes(self, manager, member, project, osf_group): - nodes = osf_group.nodes - assert len(nodes) == 0 - project.add_osf_group(osf_group, READ) - assert project in osf_group.nodes - - project_two = ProjectFactory(creator=manager) - project_two.add_osf_group(osf_group, WRITE) - assert len(osf_group.nodes) == 2 - assert project_two in osf_group.nodes - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_osf_group_to_node(self, mock_send_mail, manager, member, user_two, osf_group, project): - # noncontributor - with pytest.raises(PermissionsError): - project.add_osf_group(osf_group, WRITE, auth=Auth(member)) - - # Non-admin on project - project.add_contributor(user_two, WRITE) - project.save() - with pytest.raises(PermissionsError): - project.add_osf_group(osf_group, WRITE, auth=Auth(user_two)) - - project.add_osf_group(osf_group, READ, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - # Manager was already a node 
admin - assert project.has_permission(manager, ADMIN) is True - assert project.has_permission(manager, WRITE) is True - assert project.has_permission(manager, READ) is True - - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, WRITE, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - # project admin cannot add a group they are not a manager of - other_group = OSFGroupFactory() - with pytest.raises(PermissionsError): - project.add_osf_group(other_group, ADMIN, auth=Auth(project.creator)) - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_osf_group_to_node_emails_and_subscriptions(self, mock_send_mail, manager, member, user_two, osf_group, project): - osf_group.make_member(user_two) - - # Manager is already a node contributor - already has subscriptions - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 0 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - assert mock_send_mail.call_count == 1 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - # Three members of group, but user adding group to node doesn't get email - assert mock_send_mail.call_count == 3 - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 0 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - # Member is a contributor - project.add_contributor(member, WRITE, save=True) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - # Don't unsubscribe member because they belong to a group that has perms - project.remove_contributor(member, Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - 
@mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_group_to_node_throttle(self, mock_send_mail, osf_group, manager, member, project): - throttle = 100 - assert manager.group_connected_email_records == {} - group_signals.group_added_to_node.send(osf_group, node=project, user=manager, permission=WRITE, auth=Auth(member), throttle=throttle) - assert mock_send_mail.call_count == 1 - - assert manager.group_connected_email_records[osf_group._id] is not None - # 2nd call does not send email because throttle period has not expired - group_signals.group_added_to_node.send(osf_group, node=project, user=manager, permission=WRITE, auth=Auth(member), throttle=throttle) - assert mock_send_mail.call_count == 1 - - throttle = 0.5 - - time.sleep(1) # throttle period expires - # 2nd call does not send email because throttle period has not expired - assert manager.group_connected_email_records[osf_group._id] is not None - group_signals.group_added_to_node.send(osf_group, node=project, user=manager, permission=WRITE, auth=Auth(member), throttle=throttle) - assert mock_send_mail.call_count == 2 - - def test_add_osf_group_to_node_default_permission(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, auth=Auth(manager)) - - assert project.has_permission(manager, ADMIN) is True - assert project.has_permission(manager, WRITE) is True - assert project.has_permission(manager, READ) is True - - # osf_group given write permissions by default - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - def test_update_osf_group_node(self, manager, member, user_two, user_three, osf_group, project): - project.add_osf_group(osf_group, ADMIN) - - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, READ) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, WRITE) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, ADMIN) - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - # Project admin who does not belong to the manager group can update group permissions - project.add_contributor(user_two, ADMIN, save=True) - project.update_osf_group(osf_group, READ, auth=Auth(user_two)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is True - - # Project write contributor cannot update group permissions - project.add_contributor(user_three, WRITE, save=True) - with pytest.raises(PermissionsError): - project.update_osf_group(osf_group, ADMIN, auth=Auth(user_three)) - assert project.has_permission(member, ADMIN) is False - - def test_remove_osf_group_from_node(self, manager, member, user_two, osf_group, project): - # noncontributor - with pytest.raises(PermissionsError): - project.remove_osf_group(osf_group, auth=Auth(member)) - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert 
project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is False - - # Project admin who does not belong to the manager group can remove the group - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - project.add_contributor(user_two, ADMIN) - project.save() - project.remove_osf_group(osf_group, auth=Auth(user_two)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is False - - # Manager who is not an admin can remove the group - user_three = AuthUserFactory() - osf_group.make_manager(user_three) - project.add_osf_group(osf_group, WRITE) - assert project.has_permission(user_three, ADMIN) is False - assert project.has_permission(user_three, WRITE) is True - assert project.has_permission(user_three, READ) is True - project.remove_osf_group(osf_group, auth=Auth(user_three)) - assert project.has_permission(user_three, ADMIN) is False - assert project.has_permission(user_three, WRITE) is False - assert project.has_permission(user_three, READ) is False - - def test_node_groups_property(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - project.save() - assert osf_group in project.osf_groups - assert len(project.osf_groups) == 1 - - group_two = OSFGroupFactory(creator=manager) - project.add_osf_group(group_two, ADMIN, auth=Auth(manager)) - project.save() - assert group_two in project.osf_groups - assert len(project.osf_groups) == 2 - - def test_get_osf_groups_with_perms_property(self, manager, member, osf_group, project): - second_group = OSFGroupFactory(creator=manager) - third_group = OSFGroupFactory(creator=manager) - fourth_group = OSFGroupFactory(creator=manager) - OSFGroupFactory(creator=manager) - - project.add_osf_group(osf_group, ADMIN) - project.add_osf_group(second_group, WRITE) - project.add_osf_group(third_group, WRITE) - project.add_osf_group(fourth_group, READ) - - read_groups = project.get_osf_groups_with_perms(READ) - assert len(read_groups) == 4 - - write_groups = project.get_osf_groups_with_perms(WRITE) - assert len(write_groups) == 3 - - admin_groups = project.get_osf_groups_with_perms(ADMIN) - assert len(admin_groups) == 1 - - with pytest.raises(ValueError): - project.get_osf_groups_with_perms('crazy') - - def test_osf_group_node_can_view(self, project, manager, member, osf_group): - assert project.can_view(Auth(member)) is False - project.add_osf_group(osf_group, READ) - assert project.can_view(Auth(member)) is True - assert project.can_edit(Auth(member)) is False - - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, WRITE) - assert project.can_view(Auth(member)) is True - assert project.can_edit(Auth(member)) is True - - child = ProjectFactory(parent=project) - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, ADMIN) - # implicit OSF Group admin - assert child.can_view(Auth(member)) is True - assert child.can_edit(Auth(member)) is False - - grandchild = ProjectFactory(parent=child) - assert grandchild.can_view(Auth(member)) is True - assert grandchild.can_edit(Auth(member)) is False - - def test_node_has_permission(self, project, manager, member, 
osf_group): - assert project.can_view(Auth(member)) is False - project.add_osf_group(osf_group, READ) - assert project.has_permission(member, READ) is True - assert project.has_permission(member, WRITE) is False - assert osf_group.get_permission_to_node(project) == READ - - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, WRITE) - assert project.has_permission(member, READ) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, ADMIN) is False - assert osf_group.get_permission_to_node(project) == WRITE - - child = ProjectFactory(parent=project) - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, ADMIN) - assert osf_group.get_permission_to_node(project) == ADMIN - # implicit OSF Group admin - assert child.has_permission(member, ADMIN) is False - assert child.has_permission(member, READ) is True - assert osf_group.get_permission_to_node(child) is None - - grandchild = ProjectFactory(parent=child) - assert grandchild.has_permission(member, WRITE) is False - assert grandchild.has_permission(member, READ) is True - - def test_node_get_permissions_override(self, project, manager, member, osf_group): - project.add_osf_group(osf_group, WRITE) - assert set(project.get_permissions(member)) == {READ, WRITE} - - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, READ) - assert set(project.get_permissions(member)) == {READ} - - anon = AnonymousUser() - assert project.get_permissions(anon) == [] - - def test_is_contributor(self, project, manager, member, osf_group): - assert project.is_contributor(manager) is True - assert project.is_contributor(member) is False - project.add_osf_group(osf_group, READ, auth=Auth(project.creator)) - assert project.is_contributor(member) is False - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.is_contributor_or_group_member(member) is False - project.add_contributor(member, READ) - assert project.is_contributor(member) is True - assert project.is_contributor_or_group_member(member) is True - - def test_is_contributor_or_group_member(self, project, manager, member, osf_group): - project.add_osf_group(osf_group, ADMIN, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.is_contributor_or_group_member(member) is False - project.add_osf_group(osf_group, WRITE, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.is_contributor_or_group_member(member) is False - project.add_osf_group(osf_group, READ, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - osf_group.add_unregistered_member('jane', 'janedoe@cos.io', Auth(manager)) - unreg = osf_group.members.get(username='janedoe@cos.io') - assert unreg.is_registered is False - assert project.is_contributor_or_group_member(unreg) is False - project.add_osf_group(osf_group, READ, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(unreg) is True - - child = ProjectFactory(parent=project) - assert child.is_contributor_or_group_member(manager) is False - - def test_node_object_can_view_osfgroups(self, manager, member, project, osf_group): - project.add_contributor(member, ADMIN, 
save=True) # Member is explicit admin contributor on project - child = NodeFactory(parent=project, creator=manager) # Member is implicit admin on child - grandchild = NodeFactory(parent=child, creator=manager) # Member is implicit admin on grandchild - - project_two = ProjectFactory(creator=manager) - project_two.add_osf_group(osf_group, ADMIN) # Member has admin permissions to project_two through osf_group - child_two = NodeFactory(parent=project_two, creator=manager) # Member has implicit admin on child_two through osf_group - grandchild_two = NodeFactory(parent=child_two, creator=manager) # Member has implicit admin perms on grandchild_two through osf_group - can_view = Node.objects.can_view(member) - assert len(can_view) == 6 - assert set(list(can_view.values_list('id', flat=True))) == {project.id, - child.id, - grandchild.id, - project_two.id, - child_two.id, - grandchild_two.id} - - grandchild_two.is_deleted = True - grandchild_two.save() - can_view = Node.objects.can_view(member) - assert len(can_view) == 5 - assert grandchild_two not in can_view - - def test_parent_admin_users_osf_groups(self, manager, member, user_two, project, osf_group): - child = NodeFactory(parent=project, creator=manager) - project.add_osf_group(osf_group, ADMIN) - # Manager has explict admin to child, member has implicit admin. - # Manager should be in admin_users, member should be in parent_admin_users - admin_users = child.get_users_with_perm(ADMIN) - assert manager in admin_users - assert member not in admin_users - - assert manager not in child.parent_admin_users - assert member in child.parent_admin_users - - user_two.is_superuser = True - user_two.save() - - assert user_two not in admin_users - assert user_two not in child.parent_admin_users - - -class TestOSFGroupLogging: - def test_logging(self, project, manager, member): - # Calling actions 2x in this test to assert we're not getting double logs - group = OSFGroup.objects.create(name='My Lab', creator_id=manager.id) - assert group.logs.count() == 2 - log = group.logs.last() - assert log.action == OSFGroupLog.GROUP_CREATED - assert log.user == manager - assert log.user == manager - assert log.params['group'] == group._id - - log = group.logs.first() - assert log.action == OSFGroupLog.MANAGER_ADDED - assert log.params['group'] == group._id - - group.make_member(member, Auth(manager)) - group.make_member(member, Auth(manager)) - assert group.logs.count() == 3 - log = group.logs.first() - assert log.action == OSFGroupLog.MEMBER_ADDED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - - group.make_manager(member, Auth(manager)) - group.make_manager(member, Auth(manager)) - assert group.logs.count() == 4 - log = group.logs.first() - assert log.action == OSFGroupLog.ROLE_UPDATED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - assert log.params['new_role'] == MANAGER - - group.make_member(member, Auth(manager)) - group.make_member(member, Auth(manager)) - log = group.logs.first() - assert group.logs.count() == 5 - assert log.action == OSFGroupLog.ROLE_UPDATED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - assert log.params['new_role'] == MEMBER - - group.remove_member(member, Auth(manager)) - group.remove_member(member, Auth(manager)) - assert group.logs.count() == 6 - log = group.logs.first() - assert log.action == OSFGroupLog.MEMBER_REMOVED - assert log.user 
== manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - - group.set_group_name('New Name', Auth(manager)) - group.set_group_name('New Name', Auth(manager)) - assert group.logs.count() == 7 - log = group.logs.first() - assert log.action == OSFGroupLog.EDITED_NAME - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['name_original'] == 'My Lab' - - project.add_osf_group(group, WRITE, Auth(manager)) - project.add_osf_group(group, WRITE, Auth(manager)) - assert group.logs.count() == 8 - log = group.logs.first() - assert log.action == OSFGroupLog.NODE_CONNECTED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['node'] == project._id - assert log.params['permission'] == WRITE - node_log = project.logs.first() - - assert node_log.action == NodeLog.GROUP_ADDED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - assert node_log.params['permission'] == WRITE - - project.update_osf_group(group, READ, Auth(manager)) - project.update_osf_group(group, READ, Auth(manager)) - log = group.logs.first() - assert group.logs.count() == 9 - assert log.action == OSFGroupLog.NODE_PERMS_UPDATED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['node'] == project._id - assert log.params['permission'] == READ - node_log = project.logs.first() - - assert node_log.action == NodeLog.GROUP_UPDATED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - assert node_log.params['permission'] == READ - - project.remove_osf_group(group, Auth(manager)) - project.remove_osf_group(group, Auth(manager)) - assert group.logs.count() == 10 - log = group.logs.first() - assert log.action == OSFGroupLog.NODE_DISCONNECTED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['node'] == project._id - node_log = project.logs.first() - - assert node_log.action == NodeLog.GROUP_REMOVED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - - project.add_osf_group(group, WRITE, Auth(manager)) - project.add_osf_group(group, WRITE, Auth(manager)) - group.remove_group(auth=Auth(manager)) - - node_log = project.logs.first() - assert node_log.action == NodeLog.GROUP_REMOVED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - - -class TestRemovingContributorOrGroupMembers: - """ - Post OSF-Groups, the same kinds of checks you run when removing a contributor, - need to be run when a group is removed from a node (or a user is removed from a group, - or the group is deleted altogether). 
- - The actions are only executed if the user has no perms at all: no contributorship, - and no group membership - """ - - @pytest.fixture() - def project(self, user_two, user_three, external_account): - project = ProjectFactory(creator=user_two) - project.add_contributor(user_three, ADMIN) - project.add_addon('github', auth=Auth(user_two)) - project.creator.add_addon('github') - project.creator.external_accounts.add(external_account) - project.creator.save() - return project - - @pytest.fixture() - def file(self, project, user_two): - filename = 'my_file.txt' - project_file = OsfStorageFile.create( - target_object_id=project.id, - target_content_type=ContentType.objects.get_for_model(project), - path=f'/{filename}', - name=filename, - materialized_path=f'/{filename}') - - project_file.save() - from addons.osfstorage import settings as osfstorage_settings - - project_file.create_version(user_two, { - 'object': '06d80e', - 'service': 'cloud', - osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', - }, { - 'size': 1337, - 'contentType': 'img/png' - }).save - project_file.checkout = user_two - project_file.save() - return project_file - - @pytest.fixture() - def external_account(self): - return factories.GitHubAccountFactory() - - @pytest.fixture() - def node_settings(self, project, external_account): - node_settings = project.get_addon('github') - user_settings = project.creator.get_addon('github') - user_settings.oauth_grants[project._id] = {external_account._id: []} - user_settings.save() - node_settings.user_settings = user_settings - node_settings.user = 'Queen' - node_settings.repo = 'Sheer-Heart-Attack' - node_settings.external_account = external_account - node_settings.save() - node_settings.set_auth - return node_settings - - def test_remove_contributor_no_member_perms(self, project, node_settings, user_two, user_three, request_context, file): - assert project.get_addon('github').user_settings is not None - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - project.remove_contributor(user_two, Auth(user_three)) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_group_from_node_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_member_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert 
project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_member(user_two) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_delete_group_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_group() - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_contributor_also_member(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_remove_osf_group_from_node_also_member(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_remove_member_also_contributor(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - group.make_manager(user_three) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_member(user_two) - project.reload() - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_delete_group_also_contributor(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_group() - project.reload() - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert 
len(get_all_node_subscriptions(user_two, project)) == 2 diff --git a/osf_tests/test_queued_mail.py b/osf_tests/test_queued_mail.py index 4554b08579a..395b770a61d 100644 --- a/osf_tests/test_queued_mail.py +++ b/osf_tests/test_queued_mail.py @@ -3,7 +3,6 @@ import pytest -from unittest import mock from django.utils import timezone from waffle.testutils import override_switch @@ -35,28 +34,24 @@ def queue_mail(self, mail, user, send_at=None, **kwargs): ) return mail - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_login_presend_for_active_user(self, mock_mail, user): + def test_no_login_presend_for_active_user(self, user): mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() + dt.timedelta(seconds=10) user.save() assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_login_presend_for_inactive_user(self, mock_mail, user): + def test_no_login_presend_for_inactive_user(self, user): mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() - dt.timedelta(weeks=10) user.save() assert timezone.now() - dt.timedelta(days=1) > user.date_last_login assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_addon_presend(self, mock_mail, user): + def test_no_addon_presend(self, user): mail = self.queue_mail(mail=NO_ADDON, user=user) assert mail.send_mail() is True - @mock.patch('osf.models.queued_mail.send_mail') - def test_new_public_project_presend_for_no_project(self, mock_mail, user): + def test_new_public_project_presend_for_no_project(self, user): mail = self.queue_mail( mail=NEW_PUBLIC_PROJECT, user=user, @@ -65,8 +60,7 @@ def test_new_public_project_presend_for_no_project(self, mock_mail, user): ) assert bool(mail.send_mail()) is False - @mock.patch('osf.models.queued_mail.send_mail') - def test_new_public_project_presend_success(self, mock_mail, user): + def test_new_public_project_presend_success(self, user): node = NodeFactory(is_public=True) mail = self.queue_mail( mail=NEW_PUBLIC_PROJECT, @@ -76,8 +70,7 @@ def test_new_public_project_presend_success(self, mock_mail, user): ) assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.send_mail') - def test_welcome_osf4m_presend(self, mock_mail, user): + def test_welcome_osf4m_presend(self, user): user.date_last_login = timezone.now() - dt.timedelta(days=13) user.save() mail = self.queue_mail( @@ -90,8 +83,7 @@ def test_welcome_osf4m_presend(self, mock_mail, user): assert bool(mail.send_mail()) is True assert mail.data['downloads'] == 0 - @mock.patch('osf.models.queued_mail.send_mail') - def test_finding_other_emails_sent_to_user(self, mock_mail, user): + def test_finding_other_emails_sent_to_user(self, user): mail = self.queue_mail( user=user, mail=NO_ADDON, @@ -100,16 +92,14 @@ def test_finding_other_emails_sent_to_user(self, mock_mail, user): mail.send_mail() assert len(mail.find_sent_of_same_type_and_user()) == 1 - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_active(self, mock_mail, user): + def test_user_is_active(self, user): mail = self.queue_mail( user=user, mail=NO_ADDON, ) assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_no_password(self, mock_mail): + def test_user_is_not_active_no_password(self): user = UserFactory.build() user.set_unusable_password() user.save() @@ -119,8 +109,7 @@ def test_user_is_not_active_no_password(self, mock_mail): ) assert 
mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_not_registered(self, mock_mail): + def test_user_is_not_active_not_registered(self): user = UserFactory(is_registered=False) mail = self.queue_mail( user=user, @@ -128,8 +117,7 @@ def test_user_is_not_active_not_registered(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_is_merged(self, mock_mail): + def test_user_is_not_active_is_merged(self): other_user = UserFactory() user = UserFactory(merged_by=other_user) mail = self.queue_mail( @@ -138,8 +126,7 @@ def test_user_is_not_active_is_merged(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_is_disabled(self, mock_mail): + def test_user_is_not_active_is_disabled(self): user = UserFactory(date_disabled=timezone.now()) mail = self.queue_mail( user=user, @@ -147,8 +134,7 @@ def test_user_is_not_active_is_disabled(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') - def test_user_is_not_active_is_not_confirmed(self, mock_mail): + def test_user_is_not_active_is_not_confirmed(self): user = UserFactory(date_confirmed=None) mail = self.queue_mail( user=user, diff --git a/osf_tests/test_registration_moderation_notifications.py b/osf_tests/test_registration_moderation_notifications.py index ab4c7847e4d..100c15e64e1 100644 --- a/osf_tests/test_registration_moderation_notifications.py +++ b/osf_tests/test_registration_moderation_notifications.py @@ -9,7 +9,6 @@ from osf.migrations import update_provider_auth_groups from osf.models import Brand, NotificationDigest from osf.models.action import RegistrationAction -from osf.utils import machines from osf.utils.notifications import ( notify_submit, notify_accept_reject, @@ -25,9 +24,8 @@ RetractionFactory ) -from website import mails, settings +from website import settings from website.notifications import emails, tasks -from website.reviews import listeners def get_moderator(provider): @@ -46,9 +44,8 @@ def get_daily_moderator(provider): # Set USE_EMAIL to true and mock out the default mailer for consistency with other mocked settings -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.tasks.send_email', mock.MagicMock()) @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestRegistrationMachineNotification: MOCK_NOW = timezone.now() @@ -140,7 +137,7 @@ def withdraw_action(self, registration, admin): ) return registration_action - def test_submit_notifications(self, registration, moderator, admin, contrib, provider): + def test_submit_notifications(self, registration, moderator, admin, contrib, provider, mock_send_grid): """ [REQS-96] "As moderator of branded registry, I receive email notification upon admin author(s) submission approval" :param mock_email: @@ -150,50 +147,15 @@ def test_submit_notifications(self, registration, moderator, admin, contrib, pro # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call/args and also implicitly ensures # that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(listeners.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: - notify_submit(registration, admin) - - assert len(mock_send_mail.call_args_list) == 2 - admin_message, contrib_message = mock_send_mail.call_args_list + notify_submit(registration, admin) - assert admin_message == call( - admin.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_creator=True, - logo='osf_registries', - no_future_emails=[], - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_name=provider.name, - provider_url='http://localhost:5000/', - referrer=admin, - reviewable=registration, - user=admin, - workflow=None - ) + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert contrib_message == call( - contrib.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_creator=False, - logo='osf_registries', - no_future_emails=[], - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_name=provider.name, - provider_url='http://localhost:5000/', - referrer=admin, - reviewable=registration, - user=contrib, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Confirmation of your submission to OSF Registries' + assert contrib_message[1]['subject'] == 'Confirmation of your submission to OSF Registries' assert NotificationDigest.objects.count() == 1 digest = NotificationDigest.objects.last() @@ -365,7 +327,7 @@ def test_notify_moderator_registration_requests_withdrawal_notifications(self, m assert digest.event == 'new_pending_withdraw_requests' assert digest.provider == provider - def test_withdrawal_registration_accepted_notifications(self, registration_with_retraction, contrib, admin, withdraw_action): + def test_withdrawal_registration_accepted_notifications(self, registration_with_retraction, contrib, admin, withdraw_action, mock_send_grid): """ [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator decision" @@ -378,52 +340,17 @@ def test_withdrawal_registration_accepted_notifications(self, registration_with_ # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
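The converted tests above lean on two helpers that live in the repository's `conftest.py` and are not shown in this diff: a `mock_send_grid` pytest fixture (applied through `usefixtures` or taken as a test argument) and a `start_mock_send_grid(self)` function for unittest-style classes. Their exact patch target is an assumption here; a minimal sketch consistent with the keyword arguments the tests assert on (`to_addr`, `subject`) might look like this:

```python
# Sketch only: the real helpers live in conftest.py and may differ.
# SEND_GRID_TARGET is a placeholder; the actual dotted path of the SendGrid
# sender in this codebase is not shown in the diff.
from unittest import mock

import pytest

SEND_GRID_TARGET = 'website.mails.send_grid_send'  # hypothetical patch target


@pytest.fixture
def mock_send_grid():
    """Replace the SendGrid sender so tests can count calls and inspect
    keyword arguments such as to_addr and subject."""
    with mock.patch(SEND_GRID_TARGET) as mocked:
        yield mocked


def start_mock_send_grid(test_case):
    """unittest-style equivalent called from setUp(); the patch is stopped
    automatically when the test case is cleaned up."""
    patcher = mock.patch(SEND_GRID_TARGET)
    mocked = patcher.start()
    test_case.addCleanup(patcher.stop)
    return mocked
```

In the unittest-based suites later in this diff, the same helper is started from `setUp` alongside class-level patches of `website.mails.settings.USE_EMAIL` and `USE_CELERY`, so queued mails are rendered and handed to the mocked sender instead of a Celery task.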
- send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: - notify_withdraw_registration(registration_with_retraction, withdraw_action) + notify_withdraw_registration(registration_with_retraction, withdraw_action) - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=True, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Your registration has been withdrawn' + assert contrib_message[1]['subject'] == 'Your registration has been withdrawn' - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=False, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) - - def test_withdrawal_registration_rejected_notifications(self, registration, contrib, admin, withdraw_request_action): + def test_withdrawal_registration_rejected_notifications(self, registration, contrib, admin, withdraw_request_action, mock_send_grid): """ [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator decision" @@ -436,46 +363,17 @@ def test_withdrawal_registration_rejected_notifications(self, registration, cont # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: - notify_reject_withdraw_request(registration, withdraw_request_action) - - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list + notify_reject_withdraw_request(registration, withdraw_request_action) - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_DECLINED, - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_requester=True, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - workflow=None - ) + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_DECLINED, - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_requester=False, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Your withdrawal request has been declined' + assert contrib_message[1]['subject'] == 'Your withdrawal request has been declined' - def test_withdrawal_registration_force_notifications(self, registration_with_retraction, contrib, admin, withdraw_action): + def test_withdrawal_registration_force_notifications(self, registration_with_retraction, contrib, admin, withdraw_action, mock_send_grid): """ [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator decision" @@ -488,60 +386,25 @@ def test_withdrawal_registration_force_notifications(self, registration_with_ret # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: - notify_withdraw_registration(registration_with_retraction, withdraw_action) + notify_withdraw_registration(registration_with_retraction, withdraw_action) - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=True, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) - - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=False, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Your registration has been withdrawn' + assert contrib_message[1]['subject'] == 'Your registration has been withdrawn' @pytest.mark.parametrize( 'digest_type, expected_recipient', [('email_transactional', get_moderator), ('email_digest', get_daily_moderator)] ) - def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider): + def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider, mock_send_grid): # Invoke the fixture function to get the recipient because parametrize expected_recipient = expected_recipient(provider) - with mock.patch('website.reviews.listeners.mails.send_mail'): - notify_submit(registration, admin) + + notify_submit(registration, admin) notify_moderator_registration_requests_withdrawal(registration, admin) # One user, one provider => one email @@ -566,16 +429,14 @@ def test_submsissions_and_withdrawals_do_not_appear_in_node_digest(self, digest_ assert not list(tasks.get_users_emails(digest_type)) - def test_moderator_digest_emails_render(self, registration, admin, moderator): + def test_moderator_digest_emails_render(self, registration, admin, moderator, mock_send_grid): notify_moderator_registration_requests_withdrawal(registration, admin) # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
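Some tests in this class rely only on the class-level `usefixtures('mock_send_grid')` marker (the patch simply needs to be active), while others also accept `mock_send_grid` as an argument in order to assert on it. With a function-scoped fixture both routes resolve to the same cached instance within a single test, so the sender is patched exactly once and call counts are shared. A small self-contained illustration of that behavior (the patched target here is an arbitrary stand-in, not the project's real sender):

```python
# Illustration of fixture identity; json.dumps is an arbitrary safe-to-patch target.
from unittest import mock

import pytest


@pytest.fixture
def mock_send_grid():
    with mock.patch('json.dumps') as mocked:
        yield mocked


@pytest.mark.usefixtures('mock_send_grid')
class TestFixtureIdentity:
    def test_marker_and_argument_share_one_mock(self, mock_send_grid, request):
        # The class marker and the explicit argument hand back the same object,
        # so calls recorded through either view are identical.
        assert request.getfixturevalue('mock_send_grid') is mock_send_grid
```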
- send_mail = mails.send_mail - with mock.patch.object(tasks.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: - tasks._send_reviews_moderator_emails('email_transactional') + tasks._send_reviews_moderator_emails('email_transactional') - mock_send_mail.assert_called() + mock_send_grid.assert_called() def test_branded_provider_notification_renders(self, registration, admin, moderator): # Set brand details to be checked in notify_base.mako diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py index 1d25ca4adac..e3bc0b3d709 100644 --- a/osf_tests/test_reviewable.py +++ b/osf_tests/test_reviewable.py @@ -4,10 +4,10 @@ from osf.models import Preprint from osf.utils.workflows import DefaultStates from osf_tests.factories import PreprintFactory, AuthUserFactory -from website import mails @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestReviewable: @mock.patch('website.identifiers.utils.request_identifiers') @@ -34,21 +34,18 @@ def test_state_changes(self, _): from_db.refresh_from_db() assert from_db.machine_state == DefaultStates.ACCEPTED.value - @mock.patch('website.reviews.listeners.mails.send_mail') - def test_reject_resubmission_sends_emails(self, send_mail): + def test_reject_resubmission_sends_emails(self, mock_send_grid): user = AuthUserFactory() preprint = PreprintFactory( reviews_workflow='pre-moderation', is_published=False ) assert preprint.machine_state == DefaultStates.INITIAL.value - assert not send_mail.call_count + assert not mock_send_grid.call_count preprint.run_submit(user) - assert send_mail.call_count == 1 + assert mock_send_grid.call_count == 1 assert preprint.machine_state == DefaultStates.PENDING.value - mail_template = send_mail.call_args[0][1] - assert mail_template == mails.REVIEWS_SUBMISSION_CONFIRMATION assert not user.notification_subscriptions.exists() preprint.run_reject(user, 'comment') @@ -56,6 +53,4 @@ def test_reject_resubmission_sends_emails(self, send_mail): preprint.run_submit(user) # Resubmission alerts users and moderators assert preprint.machine_state == DefaultStates.PENDING.value - mail_template = send_mail.call_args[0][1] - assert send_mail.call_count == 2 - assert mail_template == mails.REVIEWS_RESUBMISSION_CONFIRMATION + assert mock_send_grid.call_count == 2 diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 6c6699fb74c..40965c7cf31 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -8,9 +8,8 @@ from osf.models import schema_response # import module for mocking purposes from osf.utils.workflows import ApprovalStates, SchemaResponseTriggers from osf_tests.factories import AuthUserFactory, ProjectFactory, RegistrationFactory, RegistrationProviderFactory -from osf_tests.utils import get_default_test_schema, assert_notification_correctness, _ensure_subscriptions +from osf_tests.utils import get_default_test_schema, _ensure_subscriptions -from website.mails import mails from website.notifications import emails from transitions import MachineError @@ -96,6 +95,7 @@ def revised_response(initial_response): @pytest.mark.enable_bookmark_creation @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCreateSchemaResponse(): def test_create_initial_response_sets_attributes(self, registration, schema): @@ -142,12 +142,11 @@ def test_create_initial_response_assigns_default_values(self, registration): for block in response.response_blocks.all(): assert block.response == 
DEFAULT_SCHEMA_RESPONSE_VALUES[block.schema_key] - def test_create_initial_response_does_not_notify(self, registration, admin_user): - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - schema_response.SchemaResponse.create_initial_response( - parent=registration, initiator=admin_user - ) - assert not mock_send.called + def test_create_initial_response_does_not_notify(self, registration, admin_user, mock_send_grid): + schema_response.SchemaResponse.create_initial_response( + parent=registration, initiator=admin_user + ) + assert not mock_send_grid.called def test_create_initial_response_fails_if_no_schema_and_no_parent_schema(self, registration): registration.registered_schema.clear() @@ -253,18 +252,14 @@ def test_create_from_previous_response(self, registration, initial_response): assert set(revised_response.response_blocks.all()) == set(initial_response.response_blocks.all()) def test_create_from_previous_response_notification( - self, initial_response, admin_user, notification_recipients): - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - schema_response.SchemaResponse.create_from_previous_response( - previous_response=initial_response, initiator=admin_user - ) + self, initial_response, admin_user, notification_recipients, mock_send_grid): - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_INITIATED, notification_recipients + schema_response.SchemaResponse.create_from_previous_response( + previous_response=initial_response, initiator=admin_user ) + assert mock_send_grid.called + @pytest.mark.parametrize( 'invalid_response_state', [ @@ -547,6 +542,7 @@ def test_delete_fails_if_state_is_invalid(self, invalid_response_state, initial_ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUnmoderatedSchemaResponseApprovalFlows(): def test_submit_response_adds_pending_approvers( @@ -578,29 +574,23 @@ def test_submit_response_writes_schema_response_action(self, initial_response, a assert new_action.trigger == SchemaResponseTriggers.SUBMIT.db_name def test_submit_response_notification( - self, revised_response, admin_user, notification_recipients): + self, revised_response, admin_user, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) revised_response.update_responses({'q1': 'must change one response or can\'t submit'}) revised_response.revision_justification = 'has for valid revision_justification for submission' revised_response.save() - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.submit(user=admin_user, required_approvers=[admin_user]) + revised_response.submit(user=admin_user, required_approvers=[admin_user]) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_SUBMITTED, notification_recipients - ) + assert mock_send_grid.called - def test_no_submit_notification_on_initial_response(self, initial_response, admin_user): + def test_no_submit_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) initial_response.update_responses({'q1': 'must change one response or can\'t submit'}) initial_response.revision_justification = 'has for 
valid revision_justification for submission' initial_response.save() - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - initial_response.submit(user=admin_user, required_approvers=[admin_user]) - assert not mock_send.called + initial_response.submit(user=admin_user, required_approvers=[admin_user]) + assert not mock_send_grid.called def test_submit_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) @@ -682,30 +672,23 @@ def test_approve_response_writes_schema_response_action( ).count() == 2 def test_approve_response_notification( - self, revised_response, admin_user, alternate_user, notification_recipients): + self, revised_response, admin_user, alternate_user, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user, alternate_user) + mock_send_grid.reset_mock() + revised_response.approve(user=admin_user) + assert not mock_send_grid.called # Should only send email on final approval + revised_response.approve(user=alternate_user) + assert mock_send_grid.called - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.approve(user=admin_user) - assert not mock_send.called # Should only send email on final approval - revised_response.approve(user=alternate_user) - - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_APPROVED, notification_recipients - ) - - def test_no_approve_notification_on_initial_response(self, initial_response, admin_user): + def test_no_approve_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - initial_response.approve(user=admin_user) - assert not mock_send.called + initial_response.approve(user=admin_user) + assert not mock_send_grid.called def test_approve_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -756,28 +739,22 @@ def test_reject_response_writes_schema_response_action(self, initial_response, a assert new_action.trigger == SchemaResponseTriggers.ADMIN_REJECT.db_name def test_reject_response_notification( - self, revised_response, admin_user, notification_recipients): + self, revised_response, admin_user, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.reject(user=admin_user) + revised_response.reject(user=admin_user) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_REJECTED, notification_recipients - ) + assert mock_send_grid.called - def test_no_reject_notification_on_initial_response(self, initial_response, admin_user): + def test_no_reject_notification_on_initial_response(self, initial_response, admin_user, 
mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - initial_response.reject(user=admin_user) - assert not mock_send.called + initial_response.reject(user=admin_user) + assert not mock_send_grid.called def test_reject_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -824,6 +801,7 @@ def test_internal_accept_clears_pending_approvers(self, initial_response, admin_ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestModeratedSchemaResponseApprovalFlows(): @pytest.fixture @@ -870,16 +848,13 @@ def test_schema_response_action_to_state_following_moderated_approve_is_pending_ assert new_action.to_state == ApprovalStates.PENDING_MODERATION.db_name assert new_action.trigger == SchemaResponseTriggers.APPROVE.db_name - def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user): + def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail - revised_response.approve(user=admin_user) - assert mock_send.called + revised_response.approve(user=admin_user) + assert mock_send_grid.called def test_moderators_notified_on_admin_approval(self, revised_response, admin_user, moderator): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -925,27 +900,21 @@ def test_moderator_accept_writes_schema_response_action(self, initial_response, assert new_action.trigger == SchemaResponseTriggers.ACCEPT.db_name def test_moderator_accept_notification( - self, revised_response, moderator, notification_recipients): + self, revised_response, moderator, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.accept(user=moderator) + revised_response.accept(user=moderator) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_APPROVED, notification_recipients - ) + assert mock_send_grid.called def test_no_moderator_accept_notification_on_initial_response( - self, initial_response, moderator): + self, initial_response, moderator, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - initial_response.accept(user=moderator) - assert not mock_send.called + initial_response.accept(user=moderator) + assert not mock_send_grid.called def test_moderator_reject(self, initial_response, admin_user, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) @@ -969,27 +938,21 @@ def test_moderator_reject_writes_schema_response_action( assert new_action.trigger == SchemaResponseTriggers.MODERATOR_REJECT.db_name 
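Several of these notification tests still receive the `notification_recipients` fixture but now only assert `mock_send_grid.called`, since the `assert_notification_correctness` helper was dropped from the imports. If recipient coverage is wanted again, a small inline check built on the mocked sender's call list could stand in for it; a sketch, assuming the `to_addr` keyword convention asserted elsewhere in this diff:

```python
# Sketch of a recipient check approximating the removed assert_notification_correctness.
def assert_recipients(mock_send_grid, expected_recipients):
    """Collect every to_addr passed to the mocked SendGrid sender and compare
    against the expected recipient addresses."""
    sent_to = {call.kwargs['to_addr'] for call in mock_send_grid.call_args_list}
    assert sent_to == set(expected_recipients)

# e.g. after revised_response.reject(user=moderator):
#     assert_recipients(mock_send_grid, notification_recipients)
```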
def test_moderator_reject_notification( - self, revised_response, moderator, notification_recipients): + self, revised_response, moderator, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.reject(user=moderator) + revised_response.reject(user=moderator) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_REJECTED, notification_recipients - ) + assert mock_send_grid.called def test_no_moderator_reject_notification_on_initial_response( - self, initial_response, moderator): + self, initial_response, moderator, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: - initial_response.reject(user=moderator) - assert not mock_send.called + initial_response.reject(user=moderator) + assert not mock_send_grid.called def test_moderator_cannot_submit(self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index c031fcc344a..3a2e508dd2d 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -6,7 +6,6 @@ from urllib.parse import urlparse, urljoin, parse_qs from django.db import connection, transaction -from django.contrib.auth.models import Group from django.test.utils import CaptureQueriesContext from django.utils import timezone from django.conf import settings as django_conf_settings @@ -26,7 +25,6 @@ from osf.models import ( AbstractNode, OSFUser, - OSFGroup, Tag, Contributor, NotableDomain, @@ -55,7 +53,6 @@ ExternalAccountFactory, InstitutionFactory, NodeFactory, - OSFGroupFactory, PreprintProviderFactory, ProjectFactory, TagFactory, @@ -235,30 +232,6 @@ def test_merged_user_with_two_account_on_same_project_with_different_visibility_ assert project.get_visible(user) is True assert project.is_contributor(user2) is False - def test_merged_user_group_member_permissions_are_ignored(self, user): - user2 = UserFactory.build() - user2.save() - group = OSFGroupFactory(creator=user2) - - project = ProjectFactory(is_public=True) - project.add_osf_group(group, permissions.ADMIN) - assert project.has_permission(user2, permissions.ADMIN) - # Both the master and dupe are contributors - project.add_contributor(user2, log=False) - project.add_contributor(user, log=False) - project.set_permissions(user=user, permissions=permissions.READ) - project.set_permissions(user=user2, permissions=permissions.WRITE) - project.save() - user.merge_user(user2) - user.save() - project.reload() - - assert project.has_permission(user, permissions.ADMIN) is True - assert project.is_admin_contributor(user) is False - assert project.is_contributor(user2) is False - assert group.is_member(user) is True - assert group.is_member(user2) is False - def test_merge_projects(self): user = AuthUserFactory() user2 = AuthUserFactory() @@ -837,25 +810,6 @@ def test_has_osfstorage_usersettings(self, user): class TestProjectsInCommon: - def test_get_projects_in_common(self, user, auth): - user2 = UserFactory() - project = NodeFactory(creator=user) - project.add_contributor(contributor=user2, auth=auth) - project.save() - - group = 
OSFGroupFactory(creator=user, name='Platform') - group.make_member(user2) - group_project = ProjectFactory() - group_project.add_osf_group(group) - group_project.save() - - project_keys = {node._id for node in user.all_nodes} - projects = set(user.all_nodes) - user2_project_keys = {node._id for node in user2.all_nodes} - - assert {n._id for n in user.get_projects_in_common(user2)} == project_keys.intersection(user2_project_keys) - assert user.get_projects_in_common(user2) == projects.intersection(user2.all_nodes) - def test_n_projects_in_common(self, user, auth): user2 = UserFactory() user3 = UserFactory() @@ -864,13 +818,11 @@ def test_n_projects_in_common(self, user, auth): project.add_contributor(contributor=user2, auth=auth) project.save() - group = OSFGroupFactory(name='Platform', creator=user) - group.make_member(user3) - project.add_osf_group(group) + project.add_contributor(contributor=user, auth=auth) project.save() assert user.n_projects_in_common(user2) == 1 - assert user.n_projects_in_common(user3) == 1 + assert user.n_projects_in_common(user3) == 0 class TestCookieMethods: @@ -933,6 +885,7 @@ def test_get_user_by_cookie_no_session(self): assert OSFUser.from_cookie(cookie) is None +@pytest.mark.usefixtures('mock_send_grid') class TestChangePassword: def test_change_password(self, user): @@ -944,22 +897,19 @@ def test_change_password(self, user): user.change_password(old_password, new_password, confirm_password) assert bool(user.check_password(new_password)) is True - @mock.patch('website.mails.send_mail') - def test_set_password_notify_default(self, mock_send_mail, user): + def test_set_password_notify_default(self, mock_send_grid, user): old_password = 'password' user.set_password(old_password) user.save() - assert mock_send_mail.called is True + assert mock_send_grid.called is True - @mock.patch('website.mails.send_mail') - def test_set_password_no_notify(self, mock_send_mail, user): + def test_set_password_no_notify(self, mock_send_grid, user): old_password = 'password' user.set_password(old_password, notify=False) user.save() - assert mock_send_mail.called is False + assert mock_send_grid.called is False - @mock.patch('website.mails.send_mail') - def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, settings): + def test_check_password_upgrade_hasher_no_notify(self, mock_send_grid, user, settings): # NOTE: settings fixture comes from pytest-django. 
# changes get reverted after tests run settings.PASSWORD_HASHERS = ( @@ -970,7 +920,7 @@ def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, set user.password = 'sha1$lNb72DKWDv6P$e6ae16dada9303ae0084e14fc96659da4332bb05' user.check_password(raw_password) assert user.password.startswith('md5$') - assert mock_send_mail.called is False + assert mock_send_grid.called is False def test_change_password_invalid(self, old_password=None, new_password=None, confirm_password=None, error_message='Old password is invalid'): @@ -1795,9 +1745,6 @@ def test_contributor_to_property(self): project_to_be_invisible_on = ProjectFactory() project_to_be_invisible_on.add_contributor(self.user, visible=False) project_to_be_invisible_on.save() - group = OSFGroupFactory(creator=self.user, name='Platform') - group_project = ProjectFactory() - group_project.add_osf_group(group, permissions.READ) contributor_to_nodes = [node._id for node in self.user.contributor_to] @@ -1807,7 +1754,6 @@ def test_contributor_to_property(self): assert deleted_node._id not in contributor_to_nodes assert bookmark_collection_node._id not in contributor_to_nodes assert collection_node._id not in contributor_to_nodes - assert group_project._id not in contributor_to_nodes def test_contributor_or_group_member_to_property(self): normal_node = ProjectFactory(creator=self.user) @@ -1820,9 +1766,6 @@ def test_contributor_or_group_member_to_property(self): project_to_be_invisible_on = ProjectFactory() project_to_be_invisible_on.add_contributor(self.user, visible=False) project_to_be_invisible_on.save() - group = OSFGroupFactory(creator=self.user, name='Platform') - group_project = ProjectFactory() - group_project.add_osf_group(group, permissions.READ) registration = RegistrationFactory(creator=self.user) contributor_to_or_group_member_nodes = [node._id for node in self.user.contributor_or_group_member_to] @@ -1833,24 +1776,17 @@ def test_contributor_or_group_member_to_property(self): assert deleted_node._id not in contributor_to_or_group_member_nodes assert bookmark_collection_node._id not in contributor_to_or_group_member_nodes assert collection_node._id not in contributor_to_or_group_member_nodes - assert group_project._id in contributor_to_or_group_member_nodes assert registration._id in contributor_to_or_group_member_nodes def test_all_nodes_property(self): project = ProjectFactory(creator=self.user) - project_two = ProjectFactory() - - group = OSFGroupFactory(creator=self.user) - project_two.add_osf_group(group) - project_two.save() project_three = ProjectFactory() project_three.save() user_nodes = self.user.all_nodes - assert user_nodes.count() == 2 + assert user_nodes.count() == 1 assert project in user_nodes - assert project_two in user_nodes assert project_three not in user_nodes def test_visible_contributor_to_property(self): @@ -2238,47 +2174,6 @@ def test_cant_gdpr_delete_shared_node_if_only_admin(self, user, project_user_is_ assert exc_info.value.args[0] == 'You cannot delete Node {} because it would' \ ' be a Node with contributors, but with no admin.'.format(project_user_is_only_admin._id) - def test_cant_gdpr_delete_osf_group_if_only_manager(self, user): - group = OSFGroupFactory(name='My Group', creator=user) - osf_group_name = group.name - manager_group_name = group.manager_group.name - member_group_name = group.member_group.name - member = AuthUserFactory() - group.make_member(member) - - with pytest.raises(UserStateError) as exc_info: - user.gdpr_delete() - - assert exc_info.value.args[0] == 'You cannot 
delete this user because ' \ - 'they are the only registered manager of OSFGroup ' \ - '{} that contains other members.'.format(group._id) - - unregistered = group.add_unregistered_member('fake_user', 'fake_email@cos.io', Auth(user), 'manager') - assert len(group.managers) == 2 - - with pytest.raises(UserStateError) as exc_info: - user.gdpr_delete() - - assert exc_info.value.args[0] == 'You cannot delete this user because ' \ - 'they are the only registered manager of OSFGroup ' \ - '{} that contains other members.'.format(group._id) - - group.remove_member(member) - member.gdpr_delete() - # User is not the last member in the group, so they are just removed - assert OSFGroup.objects.filter(name=osf_group_name).exists() - assert Group.objects.filter(name=manager_group_name).exists() - assert Group.objects.filter(name=member_group_name).exists() - assert group.is_member(member) is False - assert group.is_manager(member) is False - - group.remove_member(unregistered) - user.gdpr_delete() - # Group was deleted because user was the only member - assert not OSFGroup.objects.filter(name=osf_group_name).exists() - assert not Group.objects.filter(name=manager_group_name).exists() - assert not Group.objects.filter(name=member_group_name).exists() - def test_cant_gdpr_delete_with_addon_credentials(self, user, project_with_two_admins_and_addon_credentials): with pytest.raises(UserStateError) as exc_info: diff --git a/osf_tests/utils.py b/osf_tests/utils.py index a8364a15478..b3f3c92bc88 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -16,7 +16,7 @@ Sanction, RegistrationProvider, RegistrationSchema, - NotificationSubscription + NotificationSubscriptionLegacy ) from osf.utils.migrations import create_schema_blocks_for_atomic_schema @@ -229,7 +229,7 @@ def _ensure_subscriptions(provider): Avoid that. ''' for subscription in provider.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( + NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_{subscription}', event_name=subscription, provider=provider diff --git a/scripts/add_global_subscriptions.py b/scripts/add_global_subscriptions.py index b326c6f9f67..52746875d79 100644 --- a/scripts/add_global_subscriptions.py +++ b/scripts/add_global_subscriptions.py @@ -6,13 +6,13 @@ import logging import sys +from osf.models.notifications import NotificationSubscriptionLegacy from website.app import setup_django setup_django() from django.apps import apps from django.db import transaction from website.app import init_app -from osf.models import NotificationSubscription from website.notifications import constants from website.notifications.utils import to_subscription_key @@ -35,10 +35,10 @@ def add_global_subscriptions(dry=True): for user_event in user_events: user_event_id = to_subscription_key(user._id, user_event) - subscription = NotificationSubscription.load(user_event_id) + subscription = NotificationSubscriptionLegacy.load(user_event_id) if not subscription: logger.info(f'No {user_event} subscription found for user {user._id}. 
Subscribing...') - subscription = NotificationSubscription(_id=user_event_id, owner=user, event_name=user_event) + subscription = NotificationSubscriptionLegacy(_id=user_event_id, owner=user, event_name=user_event) subscription.save() # Need to save in order to access m2m fields subscription.add_user_to_subscription(user, notification_type) subscription.save() diff --git a/scripts/fix_merged_user_quickfiles.py b/scripts/fix_merged_user_quickfiles.py deleted file mode 100644 index 8d0d1a89b71..00000000000 --- a/scripts/fix_merged_user_quickfiles.py +++ /dev/null @@ -1,34 +0,0 @@ -import logging -import sys - -from django.db import transaction -from django.db.models import F, Count - -from website.app import setup_django -setup_django() -from osf.models import QuickFilesNode -from scripts import utils as script_utils - - -logger = logging.getLogger(__name__) - -def main(): - dry = '--dry' in sys.argv - if not dry: - # If we're not running in dry mode log everything to a file - script_utils.add_file_logger(logger, __file__) - with transaction.atomic(): - qs = QuickFilesNode.objects.exclude(_contributors=F('creator')).annotate(contrib_count=Count('_contributors')).exclude(contrib_count=0) - logger.info(f'Found {qs.count()} quickfiles nodes with mismatched creator and _contributors') - - for node in qs: - bad_contrib = node._contributors.get() - logger.info(f'Fixing {node._id} (quickfiles node): Replacing {bad_contrib._id} (bad contributor) with {node.creator._id} (creator)') - node.contributor_set.filter(user=bad_contrib).update(user=node.creator) - node.save() - if dry: - raise Exception('Abort Transaction - Dry Run') - print('Done') - -if __name__ == '__main__': - main() diff --git a/scripts/generate_sitemap.py b/scripts/generate_sitemap.py index c0b38739789..d7c92deae34 100644 --- a/scripts/generate_sitemap.py +++ b/scripts/generate_sitemap.py @@ -185,7 +185,7 @@ def generate(self): # AbstractNode urls (Nodes and Registrations, no Collections) objs = (AbstractNode.objects .filter(is_public=True, is_deleted=False, retraction_id__isnull=True) - .exclude(type__in=['osf.collection', 'osf.quickfilesnode']) + .exclude(type__in=['osf.collection']) .values('guids___id', 'modified')) progress.start(objs.count(), 'NODE: ') for obj in objs: diff --git a/scripts/populate_new_and_noteworthy_projects.py b/scripts/populate_new_and_noteworthy_projects.py index 9f12abc17e9..843193a6868 100644 --- a/scripts/populate_new_and_noteworthy_projects.py +++ b/scripts/populate_new_and_noteworthy_projects.py @@ -108,12 +108,15 @@ def main(dry_run=True): update_node_links(new_and_noteworthy_links_node, new_and_noteworthy_node_ids, 'new and noteworthy') - try: - new_and_noteworthy_links_node.save() - logger.info(f'Node links on {new_and_noteworthy_links_node._id} updated.') - except (KeyError, RuntimeError) as error: - logger.error('Could not migrate new and noteworthy nodes due to error') - logger.exception(error) + if new_and_noteworthy_node_ids: + try: + new_and_noteworthy_links_node.save() + logger.info(f'Node links on {new_and_noteworthy_links_node._id} updated.') + except (KeyError, RuntimeError) as error: + logger.error('Could not migrate new and noteworthy nodes due to error') + logger.exception(error) + else: + logger.error('No new and noteworthy node ids found.') if dry_run: raise RuntimeError('Dry run -- transaction rolled back.') diff --git a/scripts/remove_notification_subscriptions_from_registrations.py b/scripts/remove_notification_subscriptions_from_registrations.py index 8984cb25b50..94b20a19a93 100644 
--- a/scripts/remove_notification_subscriptions_from_registrations.py +++ b/scripts/remove_notification_subscriptions_from_registrations.py @@ -17,7 +17,7 @@ def remove_notification_subscriptions_from_registrations(dry_run=True): Registration = apps.get_model('osf.Registration') NotificationSubscription = apps.get_model('osf.NotificationSubscription') - notifications_to_delete = NotificationSubscription.objects.filter(node__type='osf.registration') + notifications_to_delete = NotificationSubscriptionLegacy.objects.filter(node__type='osf.registration') registrations_affected = Registration.objects.filter( id__in=notifications_to_delete.values_list( 'node_id', flat=True diff --git a/scripts/tests/test_deactivate_requested_accounts.py b/scripts/tests/test_deactivate_requested_accounts.py index 765bf29bb18..07e43f74bf2 100644 --- a/scripts/tests/test_deactivate_requested_accounts.py +++ b/scripts/tests/test_deactivate_requested_accounts.py @@ -1,14 +1,12 @@ import pytest -from unittest import mock from osf_tests.factories import ProjectFactory, AuthUserFactory from osf.management.commands.deactivate_requested_accounts import deactivate_requested_accounts -from website import mails, settings - @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestDeactivateRequestedAccount: @pytest.fixture() @@ -26,8 +24,7 @@ def user_requested_deactivation_with_node(self): user.save() return user - @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.send_mail') - def test_deactivate_user_with_no_content(self, mock_mail, user_requested_deactivation): + def test_deactivate_user_with_no_content(self, mock_send_grid, user_requested_deactivation): deactivate_requested_accounts(dry_run=False) user_requested_deactivation.reload() @@ -35,22 +32,13 @@ def test_deactivate_user_with_no_content(self, mock_mail, user_requested_deactiv assert user_requested_deactivation.requested_deactivation assert user_requested_deactivation.contacted_deactivation assert user_requested_deactivation.is_disabled - mock_mail.assert_called_with(can_change_preferences=False, - mail=mails.REQUEST_DEACTIVATION_COMPLETE, - to_addr=user_requested_deactivation.username, - contact_email=settings.OSF_CONTACT_EMAIL, - user=user_requested_deactivation) + mock_send_grid.assert_called() - @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.send_mail') - def test_deactivate_user_with_content(self, mock_mail, user_requested_deactivation_with_node): + def test_deactivate_user_with_content(self, mock_send_grid, user_requested_deactivation_with_node): deactivate_requested_accounts(dry_run=False) user_requested_deactivation_with_node.reload() assert user_requested_deactivation_with_node.requested_deactivation assert not user_requested_deactivation_with_node.is_disabled - mock_mail.assert_called_with(can_change_preferences=False, - mail=mails.REQUEST_DEACTIVATION, - to_addr=settings.OSF_SUPPORT_EMAIL, - user=user_requested_deactivation_with_node) - + mock_send_grid.assert_called() diff --git a/scripts/tests/test_send_queued_mails.py b/scripts/tests/test_send_queued_mails.py index 142eb75c4a6..2815b85f5d9 100644 --- a/scripts/tests/test_send_queued_mails.py +++ b/scripts/tests/test_send_queued_mails.py @@ -10,7 +10,8 @@ from scripts.send_queued_mails import main, pop_and_verify_mails_for_each_user, find_queued_mails_ready_to_be_sent from website import settings - +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class 
TestSendQueuedMails(OsfTestCase): def setUp(self): @@ -20,6 +21,10 @@ def setUp(self): self.user.osf_mailing_lists[settings.OSF_HELP_LIST] = True self.user.save() + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + + def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): return queue_mail( to_addr=user.username if user else self.user.username, @@ -29,21 +34,19 @@ def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): fullname=user.fullname if user else self.user.fullname, ) - @mock.patch('osf.models.queued_mail.send_mail') - def test_queue_addon_mail(self, mock_send): + def test_queue_addon_mail(self): self.queue_mail() main(dry_run=False) - assert mock_send.called + assert self.mock_send_grid.called - @mock.patch('osf.models.queued_mail.send_mail') - def test_no_two_emails_to_same_person(self, mock_send): + def test_no_two_emails_to_same_person(self): user = UserFactory() user.osf_mailing_lists[settings.OSF_HELP_LIST] = True user.save() self.queue_mail(user=user) self.queue_mail(user=user) main(dry_run=False) - assert mock_send.call_count == 1 + assert self.mock_send_grid.call_count == 1 def test_pop_and_verify_mails_for_each_user(self): user_with_email_sent = UserFactory() diff --git a/scripts/tests/test_triggered_mails.py b/scripts/tests/test_triggered_mails.py index ca583423fbb..b0b94a7f7c5 100644 --- a/scripts/tests/test_triggered_mails.py +++ b/scripts/tests/test_triggered_mails.py @@ -38,8 +38,7 @@ def test_trigger_no_login_mail(self, mock_queue): send_at=mock.ANY, ) - @mock.patch('website.mails.send_mail') - def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self, mock_mail): + def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self): user_active = UserFactory(fullname='Spot') user_inactive = UserFactory(fullname='Nucha') user_already_received_mail = UserFactory(fullname='Pep') diff --git a/tasks/__init__.py b/tasks/__init__.py index 28fd49e2a51..680afe87bc6 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -404,6 +404,9 @@ def test_module(ctx, module=None, numprocesses=None, nocapture=False, params=Non ADMIN_TESTS = [ 'admin_tests', ] +MAILHOG_TESTS = [ + 'api_tests/mailhog', +] @task @@ -440,6 +443,13 @@ def test_api3(ctx, numprocesses=None, coverage=False, testmon=False, junit=False test_module(ctx, module=API_TESTS3, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) +@task +def test_mailhog(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + """Run the MAILHOG test suite.""" + print(f'Testing modules "{MAILHOG_TESTS}"') + test_module(ctx, module=MAILHOG_TESTS, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) + + @task def test_admin(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): """Run the Admin test suite.""" @@ -471,6 +481,7 @@ def test(ctx, all=False, lint=False): test_addons(ctx) # TODO: Enable admin tests test_admin(ctx) + test_mailhog(ctx) @task def remove_failures_from_testmon(ctx, db_path=None): @@ -520,6 +531,11 @@ def test_ci_api3_and_osf(ctx, numprocesses=None, coverage=False, testmon=False, #ci_setup(ctx) test_api3(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) + +@task +def test_ci_mailhog(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + test_mailhog(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) + @task def wheelhouse(ctx, addons=False, release=False, dev=False, pty=True): 
"""Build wheels for python dependencies. diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 83ca7180388..17c2da39bc3 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -49,8 +49,11 @@ send_claim_registered_email, ) from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag +from conftest import start_mock_send_grid @pytest.mark.enable_implicit_clean +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestAddingContributorViews(OsfTestCase): def setUp(self): @@ -61,6 +64,8 @@ def setUp(self): # Authenticate all requests contributor_added.connect(notify_added_contributor) + self.mock_send_grid = start_mock_send_grid(self) + def test_serialize_unregistered_without_record(self): name, email = fake.name(), fake_email() res = serialize_unregistered(fullname=name, email=email) @@ -211,8 +216,7 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user( # finalize_invitation should only have been called once assert mock_send_claim_email.call_count == 1 - @mock.patch('website.mails.send_mail') - def test_add_contributors_post_only_sends_one_email_to_registered_user(self, mock_send_mail): + def test_add_contributors_post_only_sends_one_email_to_registered_user(self): # Project has components comp1 = NodeFactory(creator=self.creator, parent=self.project) comp2 = NodeFactory(creator=self.creator, parent=self.project) @@ -237,10 +241,9 @@ def test_add_contributors_post_only_sends_one_email_to_registered_user(self, moc self.app.post(url, json=payload, auth=self.creator.auth) # send_mail should only have been called once - assert mock_send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.send_mail') - def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self, mock_send_mail): + def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self): # Project has a component with a sub-component component = NodeFactory(creator=self.creator, parent=self.project) sub_component = NodeFactory(creator=self.creator, parent=component) @@ -265,7 +268,7 @@ def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_nod self.app.post(url, json=payload, auth=self.creator.auth) # send_mail is called for both the project and the sub-component - assert mock_send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 @mock.patch('website.project.views.contributor.send_claim_email') def test_email_sent_when_unreg_user_is_added(self, send_mail): @@ -286,8 +289,7 @@ def test_email_sent_when_unreg_user_is_added(self, send_mail): self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) send_mail.assert_called_with(email, ANY,ANY,notify=True, email_template='default') - @mock.patch('website.mails.send_mail') - def test_email_sent_when_reg_user_is_added(self, send_mail): + def test_email_sent_when_reg_user_is_added(self): contributor = UserFactory() contributors = [{ 'user': contributor, @@ -297,47 +299,29 @@ def test_email_sent_when_reg_user_is_added(self, send_mail): project = ProjectFactory(creator=self.auth.user) project.add_contributors(contributors, auth=self.auth) project.save() - assert send_mail.called - send_mail.assert_called_with( - to_addr=contributor.username, - mail=mails.CONTRIBUTOR_ADDED_DEFAULT, - user=contributor, - node=project, - 
referrer_name=self.auth.user.fullname, - all_global_subscriptions_none=False, - branded_service=None, - can_change_preferences=False, - logo=settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - is_initiator=False, - published_preprints=[] + assert self.mock_send_grid.called - ) assert contributor.contributor_added_email_records[project._id]['last_sent'] == approx(int(time.time()), rel=1) - @mock.patch('website.mails.send_mail') - def test_contributor_added_email_sent_to_unreg_user(self, send_mail): + def test_contributor_added_email_sent_to_unreg_user(self): unreg_user = UnregUserFactory() project = ProjectFactory() project.add_unregistered_contributor(fullname=unreg_user.fullname, email=unreg_user.email, auth=Auth(project.creator)) project.save() - assert send_mail.called + assert self.mock_send_grid.called - @mock.patch('website.mails.send_mail') - def test_forking_project_does_not_send_contributor_added_email(self, send_mail): + def test_forking_project_does_not_send_contributor_added_email(self): project = ProjectFactory() project.fork_node(auth=Auth(project.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called - @mock.patch('website.mails.send_mail') - def test_templating_project_does_not_send_contributor_added_email(self, send_mail): + def test_templating_project_does_not_send_contributor_added_email(self): project = ProjectFactory() project.use_as_template(auth=Auth(project.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called @mock.patch('website.archiver.tasks.archive') - @mock.patch('website.mails.send_mail') - def test_registering_project_does_not_send_contributor_added_email(self, send_mail, mock_archive): + def test_registering_project_does_not_send_contributor_added_email(self, mock_archive): project = ProjectFactory() provider = RegistrationProviderFactory() project.register_node( @@ -347,63 +331,57 @@ def test_registering_project_does_not_send_contributor_added_email(self, send_ma None, provider=provider ) - assert not send_mail.called + assert not self.mock_send_grid.called - @mock.patch('website.mails.send_mail') - def test_notify_contributor_email_does_not_send_before_throttle_expires(self, send_mail): + def test_notify_contributor_email_does_not_send_before_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) notify_added_contributor(project, contributor, auth) - assert send_mail.called + assert self.mock_send_grid.called # 2nd call does not send email because throttle period has not expired notify_added_contributor(project, contributor, auth) - assert send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.send_mail') - def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail): + def test_notify_contributor_email_sends_after_throttle_expires(self): throttle = 0.5 contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) notify_added_contributor(project, contributor, auth, throttle=throttle) - assert send_mail.called + assert self.mock_send_grid.called time.sleep(1) # throttle period expires notify_added_contributor(project, contributor, auth, throttle=throttle) - assert send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 - @mock.patch('website.mails.send_mail') - def test_add_contributor_to_fork_sends_email(self, send_mail): + def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() fork = 
self.project.fork_node(auth=Auth(self.creator)) fork.add_contributor(contributor, auth=Auth(self.creator)) fork.save() - assert send_mail.called - assert send_mail.call_count == 1 + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.send_mail') - def test_add_contributor_to_template_sends_email(self, send_mail): + def test_add_contributor_to_template_sends_email(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) template.add_contributor(contributor, auth=Auth(self.creator)) template.save() - assert send_mail.called - assert send_mail.call_count == 1 + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.send_mail') - def test_creating_fork_does_not_email_creator(self, send_mail): + def test_creating_fork_does_not_email_creator(self): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called - @mock.patch('website.mails.send_mail') - def test_creating_template_does_not_email_creator(self, send_mail): + def test_creating_template_does_not_email_creator(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called def test_add_multiple_contributors_only_adds_one_log(self): n_logs_pre = self.project.logs.count() @@ -459,6 +437,8 @@ def tearDown(self): contributor_added.disconnect(notify_added_contributor) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestUserInviteViews(OsfTestCase): def setUp(self): @@ -467,6 +447,8 @@ def setUp(self): self.project = ProjectFactory(creator=self.user) self.invite_url = f'/api/v1/project/{self.project._primary_key}/invite_contributor/' + self.mock_send_grid = start_mock_send_grid(self) + def test_invite_contributor_post_if_not_in_db(self): name, email = fake.name(), fake_email() res = self.app.post( @@ -534,8 +516,7 @@ def test_invite_contributor_requires_fullname(self): ) assert res.status_code == http_status.HTTP_400_BAD_REQUEST - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_email_to_given_email(self, send_mail): + def test_send_claim_email_to_given_email(self): project = ProjectFactory() given_email = fake_email() unreg_user = project.add_unregistered_contributor( @@ -546,23 +527,9 @@ def test_send_claim_email_to_given_email(self, send_mail): project.save() send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) - send_mail.assert_called_with( - given_email, - mails.INVITE_DEFAULT, - user=unreg_user, - referrer=ANY, - node=project, - claim_url=ANY, - email=unreg_user.email, - fullname=unreg_user.fullname, - branded_service=None, - can_change_preferences=False, - logo='osf_logo', - osf_contact_email=settings.OSF_CONTACT_EMAIL - ) + self.mock_send_grid.assert_called() - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_email_to_referrer(self, send_mail): + def test_send_claim_email_to_referrer(self): project = ProjectFactory() referrer = project.creator given_email, real_email = fake_email(), fake_email() @@ -573,25 +540,9 @@ def test_send_claim_email_to_referrer(self, send_mail): project.save() send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) - assert send_mail.called - # email was sent to referrer - 
send_mail.assert_called_with( - referrer.username, - mails.FORWARD_INVITE, - user=unreg_user, - referrer=referrer, - claim_url=unreg_user.get_claim_url(project._id, external=True), - email=real_email.lower().strip(), - fullname=unreg_user.get_unclaimed_record(project._id)['name'], - node=project, - branded_service=None, - can_change_preferences=False, - logo=settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL - ) + assert self.mock_send_grid.called - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_email_before_throttle_expires(self, send_mail): + def test_send_claim_email_before_throttle_expires(self): project = ProjectFactory() given_email = fake_email() unreg_user = project.add_unregistered_contributor( @@ -601,14 +552,16 @@ def test_send_claim_email_before_throttle_expires(self, send_mail): ) project.save() send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - send_mail.reset_mock() + self.mock_send_grid.reset_mock() # 2nd call raises error because throttle hasn't expired with pytest.raises(HTTPError): send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - assert not send_mail.called + assert not self.mock_send_grid.called @pytest.mark.enable_implicit_clean +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestClaimViews(OsfTestCase): def setUp(self): @@ -640,6 +593,8 @@ def setUp(self): ) self.project.save() + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('website.project.views.contributor.send_claim_email') def test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email): name = fake.name() @@ -737,8 +692,7 @@ def test_claim_user_invited_with_no_email_posts_to_claim_form(self): }) assert res.status_code == 400 - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_claim_user_post_with_registered_user_id(self, send_mail): + def test_claim_user_post_with_registered_user_id(self): # registered user who is attempting to claim the unclaimed contributor reg_user = UserFactory() payload = { @@ -750,14 +704,13 @@ def test_claim_user_post_with_registered_user_id(self, send_mail): res = self.app.post(url, json=payload) # mail was sent - assert send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 # ... 
to the correct address - referrer_call = send_mail.call_args_list[0] - claimer_call = send_mail.call_args_list[1] - args, _ = referrer_call - assert args[0] == self.referrer.username - args, _ = claimer_call - assert args[0] == reg_user.username + referrer_call = self.mock_send_grid.call_args_list[0] + claimer_call = self.mock_send_grid.call_args_list[1] + + assert referrer_call[1]['to_addr'] == self.referrer.email + assert claimer_call[1]['to_addr'] == reg_user.email # view returns the correct JSON assert res.json == { @@ -766,29 +719,27 @@ def test_claim_user_post_with_registered_user_id(self, send_mail): 'fullname': self.given_name, } - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_registered_email(self, mock_send_mail): + def test_send_claim_registered_email(self): reg_user = UserFactory() send_claim_registered_email( claimer=reg_user, unclaimed_user=self.user, node=self.project ) - assert mock_send_mail.call_count == 2 - first_call_args = mock_send_mail.call_args_list[0][0] - assert first_call_args[0] == self.referrer.username - second_call_args = mock_send_mail.call_args_list[1][0] - assert second_call_args[0] == reg_user.username - - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mail): + assert self.mock_send_grid.call_count == 2 + first_call_args = self.mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == self.referrer.email + second_call_args = self.mock_send_grid.call_args_list[1][1] + assert second_call_args['to_addr'] == reg_user.email + + def test_send_claim_registered_email_before_throttle_expires(self): reg_user = UserFactory() send_claim_registered_email( claimer=reg_user, unclaimed_user=self.user, node=self.project, ) - mock_send_mail.reset_mock() + self.mock_send_grid.reset_mock() # second call raises error because it was called before throttle period with pytest.raises(HTTPError): send_claim_registered_email( @@ -796,7 +747,7 @@ def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mai unclaimed_user=self.user, node=self.project, ) - assert not mock_send_mail.called + assert not self.mock_send_grid.called @mock.patch('website.project.views.contributor.send_claim_registered_email') def test_claim_user_post_with_email_already_registered_sends_correct_email( @@ -973,8 +924,7 @@ def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_sea assert unreg.given_name == parsed_name['given_name'] assert unreg.family_name == parsed_name['family_name'] - @mock.patch('website.project.views.contributor.mails.send_mail') - def test_claim_user_post_returns_fullname(self, send_mail): + def test_claim_user_post_returns_fullname(self): url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' res = self.app.post( url, @@ -985,34 +935,17 @@ def test_claim_user_post_returns_fullname(self, send_mail): }, ) assert res.json['fullname'] == self.given_name - assert send_mail.called - - send_mail.assert_called_with( - self.given_email, - mails.INVITE_DEFAULT, - user=self.user, - referrer=self.referrer, - node=ANY, - claim_url=ANY, - email=self.user.email, - fullname=self.user.fullname, - branded_service=None, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - can_change_preferences=False, - logo='osf_logo' - ) + assert self.mock_send_grid.called - - @mock.patch('website.project.views.contributor.mails.send_mail') - def 
test_claim_user_post_if_email_is_different_from_given_email(self, send_mail): + def test_claim_user_post_if_email_is_different_from_given_email(self): email = fake_email() # email that is different from the one the referrer gave url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' self.app.post(url, json={'value': email, 'pk': self.user._primary_key} ) - assert send_mail.called - assert send_mail.call_count == 2 - call_to_invited = send_mail.mock_calls[0] + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 2 + call_to_invited = self.mock_send_grid.mock_calls[0] call_to_invited.assert_called_with(to_addr=email) - call_to_referrer = send_mail.mock_calls[1] + call_to_referrer = self.mock_send_grid.mock_calls[1] call_to_referrer.assert_called_with(to_addr=self.given_email) def test_claim_url_with_bad_token_returns_400(self): @@ -1062,4 +995,3 @@ def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self assert res.status_code == 302 self.user.reload() assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags - diff --git a/tests/test_addons.py b/tests/test_addons.py index f6fda06a024..f8421f2bd74 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -1,7 +1,6 @@ import datetime import time import functools -import logging from importlib import import_module from unittest.mock import Mock @@ -16,13 +15,12 @@ from framework.auth.core import Auth from framework.exceptions import HTTPError from framework.sessions import get_session -from tests.base import OsfTestCase, get_default_metaschema +from tests.base import OsfTestCase from api_tests.utils import create_test_file from osf_tests.factories import ( AuthUserFactory, ProjectFactory, RegistrationFactory, - DraftRegistrationFactory, ) from website import settings from addons.base import views @@ -44,8 +42,6 @@ from api.caching.utils import storage_usage_cache from dateutil.parser import parse as parse_date from framework import sentry -from api.base.settings.defaults import API_BASE -from tests.json_api_test_app import JSONAPITestApp from website.settings import EXTERNAL_EMBER_APPS from waffle.testutils import override_flag from django.conf import settings as django_conf_settings diff --git a/tests/test_auth.py b/tests/test_auth.py index b59c1c065ab..6088c608e67 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -36,14 +36,21 @@ must_have_addon, must_be_addon_authorizer, ) from website.util import api_url_for +from conftest import start_mock_send_grid from tests.test_cas_authentication import generate_external_user_with_resp logger = logging.getLogger(__name__) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthUtils(OsfTestCase): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + def test_citation_with_only_fullname(self): user = UserFactory() user.fullname = 'Martin Luther King, Jr.' 
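# Editor's note: the hunks above decorate whole TestCase classes with
# mock.patch('website.mails.settings.USE_EMAIL', True) / ('...USE_CELERY', False).
# A minimal, self-contained illustration (not OSF code) of that pattern follows;
# it uses a SimpleNamespace stand-in and patch.object instead of the string target,
# purely so it runs anywhere. Because an explicit replacement value is supplied,
# mock applies the patch to every test_* method without injecting an extra argument,
# which is why the patched tests above lose their old `mock_mail` parameters.
from types import SimpleNamespace
from unittest import TestCase, mock

settings_stub = SimpleNamespace(USE_EMAIL=False, USE_CELERY=True)  # stand-in object

@mock.patch.object(settings_stub, 'USE_EMAIL', True)
@mock.patch.object(settings_stub, 'USE_CELERY', False)
class ExampleSettingsPatch(TestCase):
    def test_flags_are_patched(self):
        # Inside any test_* method of the decorated class the values are swapped.
        assert settings_stub.USE_EMAIL is True
        assert settings_stub.USE_CELERY is False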
@@ -71,8 +78,7 @@ def test_unreg_user_can_register(self): assert user.get_confirmation_token(user.username) - @mock.patch('framework.auth.views.mails.send_mail') - def test_confirm_email(self, mock_mail): + def test_confirm_email(self): user = UnregUserFactory() auth.register_unconfirmed( @@ -91,7 +97,7 @@ def test_confirm_email(self, mock_mail): user.reload() - mock_mail.assert_not_called() + self.mock_send_grid.assert_not_called() self.app.set_cookie(settings.COOKIE_NAME, user.get_or_create_cookie().decode()) @@ -101,7 +107,7 @@ def test_confirm_email(self, mock_mail): assert res.status_code == 302 assert '/' == urlparse(res.location).path - assert len(mock_mail.call_args_list) == 0 + assert len(self.mock_send_grid.call_args_list) == 0 assert len(get_session()['status']) == 1 def test_get_user_by_id(self): @@ -163,23 +169,15 @@ def test_successful_external_first_login_without_attributes(self, mock_service_v cas.make_response_from_ticket(ticket, service_url) assert user == mock_external_first_login_authenticate.call_args[0][0] - @mock.patch('framework.auth.views.mails.send_mail') - def test_password_change_sends_email(self, mock_mail): + def test_password_change_sends_email(self): user = UserFactory() user.set_password('killerqueen') user.save() - assert len(mock_mail.call_args_list) == 1 - empty, kwargs = mock_mail.call_args - kwargs['user'].reload() + assert len(self.mock_send_grid.call_args_list) == 1 + empty, kwargs = self.mock_send_grid.call_args assert empty == () - assert kwargs == { - 'user': user, - 'mail': mails.PASSWORD_RESET, - 'to_addr': user.username, - 'can_change_preferences': False, - 'osf_contact_email': settings.OSF_CONTACT_EMAIL, - } + assert kwargs['to_addr'] == user.username @mock.patch('framework.auth.utils.requests.post') def test_validate_recaptcha_success(self, req_post): @@ -211,8 +209,7 @@ def test_validate_recaptcha_empty_response(self, req_post): # ensure None short circuits execution (no call to google) assert not validate_recaptcha(None) - @mock.patch('framework.auth.views.mails.send_mail') - def test_sign_up_twice_sends_two_confirmation_emails_only(self, mock_mail): + def test_sign_up_twice_sends_two_confirmation_emails_only(self): # Regression test for https://openscience.atlassian.net/browse/OSF-7060 url = api_url_for('register_user') sign_up_data = { @@ -223,20 +220,10 @@ def test_sign_up_twice_sends_two_confirmation_emails_only(self, mock_mail): } self.app.post(url, json=sign_up_data) - assert len(mock_mail.call_args_list) == 1 - args, kwargs = mock_mail.call_args - assert args == ( - 'caesar@romanempire.com', - mails.INITIAL_CONFIRM_EMAIL, - ) + assert len(self.mock_send_grid.call_args_list) == 1 self.app.post(url, json=sign_up_data) - assert len(mock_mail.call_args_list) == 2 - args, kwargs = mock_mail.call_args - assert args == ( - 'caesar@romanempire.com', - mails.INITIAL_CONFIRM_EMAIL, - ) + assert len(self.mock_send_grid.call_args_list) == 2 class TestAuthObject(OsfTestCase): diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 7ab356ddba8..31445da2c8d 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -40,9 +40,12 @@ ) from website import mails, settings from website.util import api_url_for, web_url_for +from conftest import start_mock_send_grid pytestmark = pytest.mark.django_db +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthViews(OsfTestCase): def setUp(self): @@ -50,8 +53,9 @@ def setUp(self): self.user = AuthUserFactory() 
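# Editor's note: the implementation of start_mock_send_grid lives in conftest.py and
# is not part of this diff. A plausible minimal sketch, assuming outgoing mail is
# funnelled through a single SendGrid-backed send function, is shown below; the
# patch target string is hypothetical and only stands in for whatever conftest.py
# actually patches.
from unittest import mock

def start_mock_send_grid(test_case):
    # Start the patch for the duration of the test and hand the mock back to the
    # caller; addCleanup stops the patch even if the test fails mid-way.
    patcher = mock.patch('website.mails.send_email_over_sendgrid')  # hypothetical target
    mocked = patcher.start()
    test_case.addCleanup(patcher.stop)
    return mocked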
self.auth = self.user.auth - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_ok(self, _): + self.mock_send_grid = start_mock_send_grid(self) + + def test_register_ok(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -67,9 +71,7 @@ def test_register_ok(self, _): assert user.fullname == name assert user.accepted_terms_of_service is None - # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2902 - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_email_case_insensitive(self, _): + def test_register_email_case_insensitive(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -84,8 +86,7 @@ def test_register_email_case_insensitive(self, _): user = OSFUser.objects.get(username=email) assert user.fullname == name - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_email_with_accepted_tos(self, _): + def test_register_email_with_accepted_tos(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -101,8 +102,7 @@ def test_register_email_with_accepted_tos(self, _): user = OSFUser.objects.get(username=email) assert user.accepted_terms_of_service - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_email_without_accepted_tos(self, _): + def test_register_email_without_accepted_tos(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -195,8 +195,7 @@ def test_register_blocked_email_domain(self): assert users.count() == 0 @mock.patch('framework.auth.views.validate_recaptcha', return_value=True) - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_good_captcha(self, _, validate_recaptcha): + def test_register_good_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' captcha = 'some valid captcha' @@ -217,8 +216,7 @@ def test_register_good_captcha(self, _, validate_recaptcha): assert user.fullname == name @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_missing_captcha(self, _, validate_recaptcha): + def test_register_missing_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'): @@ -236,8 +234,7 @@ def test_register_missing_captcha(self, _, validate_recaptcha): assert resp.status_code == http_status.HTTP_400_BAD_REQUEST @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.send_mail') - def test_register_bad_captcha(self, _, validate_recaptcha): + def test_register_bad_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'): @@ -317,35 +314,21 @@ def test_register_sends_user_registered_signal(self, mock_send_confirm_email): assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} assert mock_send_confirm_email.called - @mock.patch('framework.auth.views.mails.send_mail') - def 
test_resend_confirmation(self, send_mail: MagicMock): + def test_resend_confirmation(self): email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert send_mail.called - send_mail.assert_called_with( - email, - mails.CONFIRM_EMAIL, - user=self.user, - confirmation_url=ANY, - email='test@mail.com', - merge_target=None, - external_id_provider=None, - branded_preprints_provider=None, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - logo='osf_logo' - ) + assert self.mock_send_grid.called + self.user.reload() assert token != self.user.get_confirmation_token(email) with pytest.raises(InvalidTokenError): self.user.get_unconfirmed_email_for_token(token) - @mock.patch('framework.auth.views.mails.send_mail') - def test_click_confirmation_email(self, send_mail): + def test_click_confirmation_email(self): # TODO: check in qa url encoding email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) @@ -509,14 +492,13 @@ def test_resend_confirmation_not_work_for_confirmed_email(self): assert res.status_code == 400 assert res.json['message_long'] == 'Cannnot resend confirmation for confirmed emails' - @mock.patch('framework.auth.views.mails.send_mail') - def test_resend_confirmation_does_not_send_before_throttle_expires(self, send_mail): + def test_resend_confirmation_does_not_send_before_throttle_expires(self): email = 'test@mail.com' self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert send_mail.called + assert self.mock_send_grid.called # 2nd call does not send email because throttle period has not expired res = self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) assert res.status_code == 400 @@ -940,4 +922,3 @@ def test_reset_password_logs_out_user(self): assert 'reauth' not in location assert 'logout?service=' in location assert 'resetpassword' in location - diff --git a/tests/test_conferences.py b/tests/test_conferences.py deleted file mode 100644 index 2f431df55bd..00000000000 --- a/tests/test_conferences.py +++ /dev/null @@ -1,798 +0,0 @@ -from unittest import mock - -import hmac -import hashlib -from io import BytesIO - -import pytest -from django.db import IntegrityError -from furl import furl - -from framework.auth import get_or_create_user -from framework.auth.core import Auth - -from osf.models import OSFUser, AbstractNode -from addons.wiki.models import WikiVersion -from osf.exceptions import BlockedEmailError -from website import settings -from website.conferences import views -from website.conferences import utils, message -from website.util import api_url_for, web_url_for - -from tests.base import OsfTestCase, fake -from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory - - -def assert_absolute(url): - parsed_domain = furl(settings.DOMAIN) - parsed_url = furl(url) - assert parsed_domain.host == parsed_url.host - - -def assert_equal_urls(first, second): - parsed_first = furl(first) - parsed_first.port = None - parsed_second = furl(second) - parsed_second.port = None - assert parsed_first == parsed_second - - -def create_fake_conference_nodes(n, conference): - nodes = [] - for i in range(n): - node = 
ProjectFactory(is_public=True) - conference.submissions.add(node) - node.save() - nodes.append(node) - return nodes - - -def create_fake_conference_nodes_bad_data(conference, n, bad_n, endpoint): - nodes = [] - for i in range(n): - node = ProjectFactory(is_public=True) - conference.submissions.add(node) - # inject bad data - if i < bad_n: - # Delete only contributor - node.contributor_set.filter(user=node.contributors.first()).delete() - node.save() - nodes.append(node) - return nodes - - -class TestConferenceUtils(OsfTestCase): - - def test_get_or_create_user_exists(self): - user = UserFactory() - fetched, created = get_or_create_user(user.fullname, user.username, is_spam=True) - assert not created - assert user._id == fetched._id - assert 'is_spam' not in fetched.system_tags - - def test_get_or_create_user_not_exists(self): - fullname = 'Roger Taylor' - username = 'roger@queen.com' - fetched, created = get_or_create_user(fullname, username, is_spam=False) - fetched.save() # in order to access m2m fields, e.g. tags - assert created - assert fetched.fullname == fullname - assert fetched.username == username - assert 'is_spam' not in fetched.system_tags - - def test_get_or_create_user_is_spam(self): - fullname = 'John Deacon' - username = 'deacon@queen.com' - fetched, created = get_or_create_user(fullname, username, is_spam=True) - fetched.save() # in order to access m2m fields, e.g. tags - assert created - assert fetched.fullname == fullname - assert fetched.username == username - assert 'is_spam' in fetched.system_tags - - def test_get_or_create_user_with_blocked_domain(self): - fullname = 'Kanye West' - username = 'kanye@mailinator.com' - with pytest.raises(BlockedEmailError) as e: - get_or_create_user(fullname, username, is_spam=True) - assert str(e.value) == 'Invalid Email' - - -class ContextTestCase(OsfTestCase): - MAILGUN_API_KEY = 'mailkimp' - - @classmethod - def setUpClass(cls): - super().setUpClass() - settings.MAILGUN_API_KEY, cls._MAILGUN_API_KEY = cls.MAILGUN_API_KEY, settings.MAILGUN_API_KEY - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - settings.MAILGUN_API_KEY = cls._MAILGUN_API_KEY - - def make_context(self, method='POST', **kwargs): - data = { - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - } - data.update(kwargs.pop('data', {})) - data = { - key: value - for key, value in data.items() - if value is not None - } - return self.app.application.test_request_context(method=method, data=data, **kwargs) - - -class TestProvisionNode(ContextTestCase): - - def setUp(self): - super().setUp() - self.node = ProjectFactory() - self.user = self.node.creator - self.conference = ConferenceFactory() - self.body = 'dragon on my back' - self.content = b'dragon attack' - self.attachment = BytesIO(self.content) - self.recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - self.conference.endpoint, - ) - - def make_context(self, **kwargs): - data = { - 'attachment-count': '1', - 'attachment-1': (self.attachment, 'attachment-1'), - 'X-Mailgun-Sscore': 0, - 'recipient': self.recipient, - 'stripped-text': self.body, - } - data.update(kwargs.pop('data', {})) - return super().make_context(data=data, **kwargs) - - def test_provision(self): - with self.make_context(): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - 
assert self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert self.conference.endpoint in self.node.system_tags - assert self.node in self.conference.submissions.all() - assert 'spam' not in self.node.system_tags - - def test_provision_private(self): - self.conference.public_projects = False - self.conference.save() - with self.make_context(): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert not self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert 'spam' not in self.node.system_tags - - def test_provision_spam(self): - with self.make_context(data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert not self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert 'spam' in self.node.system_tags - - @mock.patch('website.conferences.utils.waterbutler_api_url_for') - @mock.patch('website.conferences.utils.requests.put') - def test_upload(self, mock_put, mock_get_url): - mock_get_url.return_value = 'http://queen.com/' - file_name = 'hammer-to-fall' - self.attachment.filename = file_name - self.attachment.content_type = 'application/json' - utils.upload_attachment(self.user, self.node, self.attachment) - mock_get_url.assert_called_with( - self.node._id, - 'osfstorage', - _internal=True, - base_url=self.node.osfstorage_region.waterbutler_url, - cookie=self.user.get_or_create_cookie().decode(), - name=file_name - ) - mock_put.assert_called_with( - mock_get_url.return_value, - data=self.content, - cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, - ) - - @mock.patch('website.conferences.utils.waterbutler_api_url_for') - @mock.patch('website.conferences.utils.requests.put') - def test_upload_no_file_name(self, mock_put, mock_get_url): - mock_get_url.return_value = 'http://queen.com/' - self.attachment.filename = '' - self.attachment.content_type = 'application/json' - utils.upload_attachment(self.user, self.node, self.attachment) - mock_get_url.assert_called_with( - self.node._id, - 'osfstorage', - _internal=True, - base_url=self.node.osfstorage_region.waterbutler_url, - cookie=self.user.get_or_create_cookie().decode(), - name=settings.MISSING_FILE_NAME, - ) - mock_put.assert_called_with( - mock_get_url.return_value, - data=self.content, - cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, - ) - - @mock.patch('website.conferences.utils.upload_attachments') - def test_add_poster_by_email(self, mock_upload_attachments): - conference = ConferenceFactory() - - with self.make_context(data={'from': 'bdawk@sb52champs.com', 'subject': 'It\'s PARTY TIME!'}): - msg = message.ConferenceMessage() - views.add_poster_by_email(conference, msg) - - user = OSFUser.objects.get(username='bdawk@sb52champs.com') - assert user.email == 'bdawk@sb52champs.com' - assert user.fullname == user._id # user's shouldn't be able to use email as fullname, so we use the guid. 
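# Editor's note: the deleted conference tests sign their fake Mailgun webhook
# payloads with HMAC-SHA256 over timestamp + token, keyed by MAILGUN_API_KEY,
# exactly as in the make_context helpers above. A standalone illustration of
# producing and checking such a signature (key and values taken from the tests):
import hashlib
import hmac

def mailgun_signature(api_key: str, timestamp: str, token: str) -> str:
    return hmac.new(
        key=api_key.encode(),
        msg=f'{timestamp}{token}'.encode(),
        digestmod=hashlib.sha256,
    ).hexdigest()

def verify_mailgun_signature(api_key: str, timestamp: str, token: str, signature: str) -> bool:
    # compare_digest avoids leaking information through comparison timing.
    return hmac.compare_digest(mailgun_signature(api_key, timestamp, token), signature)

assert verify_mailgun_signature('mailkimp', '123', 'secret',
                                mailgun_signature('mailkimp', '123', 'secret'))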
- - -class TestMessage(ContextTestCase): - PUSH_CONTEXT = False - - def test_verify_signature_valid(self): - with self.make_context(): - msg = message.ConferenceMessage() - msg.verify_signature() - - def test_verify_signature_invalid(self): - with self.make_context(data={'signature': 'fake'}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.verify_signature() - - def test_is_spam_false_missing_headers(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1}, - ) - with ctx: - msg = message.ConferenceMessage() - assert not msg.is_spam - - def test_is_spam_false_all_headers(self): - ctx = self.make_context( - method='POST', - data={ - 'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1, - 'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0], - 'X-Mailgun-Spf': message.SPF_PASS_VALUES[0], - }, - ) - with ctx: - msg = message.ConferenceMessage() - assert not msg.is_spam - - def test_is_spam_true_sscore(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_is_spam_true_dkim(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0][::-1]}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_is_spam_true_spf(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Spf': message.SPF_PASS_VALUES[0][::-1]}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_subject(self): - ctx = self.make_context( - method='POST', - data={'subject': 'RE: Hip Hopera'}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.subject == 'Hip Hopera' - - def test_recipient(self): - address = 'test-conference@osf.io' - ctx = self.make_context( - method='POST', - data={'recipient': address}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.recipient == address - - def test_text(self): - text = 'welcome to my nuclear family' - ctx = self.make_context( - method='POST', - data={'stripped-text': text}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.text == text - - def test_sender_name(self): - names = [ - (' Fred', 'Fred'), - ('Me‰¨ü', 'Me‰¨ü'), - ('fred@queen.com', 'fred@queen.com'), - ('Fred ', 'Fred'), - ('"Fred" ', 'Fred'), - ] - for name in names: - with self.make_context(data={'from': name[0]}): - msg = message.ConferenceMessage() - assert msg.sender_name == name[1] - - def test_sender_email(self): - emails = [ - ('fred@queen.com', 'fred@queen.com'), - ('FRED@queen.com', 'fred@queen.com') - ] - for email in emails: - with self.make_context(data={'from': email[0]}): - msg = message.ConferenceMessage() - assert msg.sender_email == email[1] - - def test_route_invalid_pattern(self): - with self.make_context(data={'recipient': 'spam@osf.io'}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_route_invalid_test(self): - recipient = '{}conf-talk@osf.io'.format('' if settings.DEV_MODE else 'stage-') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_route_valid_alternate(self): - conf = 
ConferenceFactory(endpoint='chocolate', active=True) - conf.name = 'Chocolate Conference' - conf.field_names['submission2'] = 'data' - conf.save() - recipient = '{}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - assert msg.conference_name == 'chocolate' - assert msg.conference_category == 'data' - conf.__class__.delete(conf) - - def test_route_valid_b(self): - recipient = '{}conf-poster@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - assert msg.conference_name == 'conf' - assert msg.conference_category == 'poster' - - def test_alternate_route_invalid(self): - recipient = '{}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_attachments_count_zero(self): - with self.make_context(data={'attachment-count': '0'}): - msg = message.ConferenceMessage() - assert msg.attachments == [] - - def test_attachments_count_one(self): - content = b'slightly mad' - sio = BytesIO(content) - ctx = self.make_context( - method='POST', - data={ - 'attachment-count': 1, - 'attachment-1': (sio, 'attachment-1'), - }, - ) - with ctx: - msg = message.ConferenceMessage() - assert len(msg.attachments) == 1 - assert msg.attachments[0].read() == content - - -class TestConferenceEmailViews(OsfTestCase): - - def test_redirect_to_meetings_url(self): - url = '/presentations/' - res = self.app.get(url) - assert res.status_code == 302 - res = self.app.get(url, follow_redirects=True) - assert res.request.path == '/meetings/' - - def test_conference_submissions(self): - AbstractNode.objects.all().delete() - conference1 = ConferenceFactory() - conference2 = ConferenceFactory() - # Create conference nodes - create_fake_conference_nodes( - 3, - conference1, - ) - create_fake_conference_nodes( - 2, - conference2, - ) - - url = api_url_for('conference_submissions') - res = self.app.get(url) - assert res.json['success'] - - def test_conference_plain_returns_200(self): - conference = ConferenceFactory() - url = web_url_for('conference_results__plain', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - - def test_conference_data(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - # Regression for OSF-8864 to confirm bad project data does not make whole conference break - def test_conference_bad_data(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - n_conference_nodes_bad = 1 - create_fake_conference_nodes_bad_data( - conference, - n_conference_nodes, - n_conference_nodes_bad, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) 
== n_conference_nodes - n_conference_nodes_bad - - def test_conference_data_url_upper(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint.upper()) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - def test_conference_data_tag_upper(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - def test_conference_results(self): - conference = ConferenceFactory() - - url = web_url_for('conference_results', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - - def test_confererence_results_endpoint_is_case_insensitive(self): - ConferenceFactory(endpoint='StudySwap') - url = web_url_for('conference_results', meeting='studyswap') - res = self.app.get(url) - assert res.status_code == 200 - - -class TestConferenceModel(OsfTestCase): - - def test_endpoint_is_required(self): - with pytest.raises(IntegrityError): - ConferenceFactory(endpoint=None, name=fake.company()).save() - - def test_name_is_required(self): - with pytest.raises(IntegrityError): - ConferenceFactory(endpoint='spsp2014', name=None).save() - - def test_default_field_names(self): - conf = ConferenceFactory(endpoint='cookie', name='Cookies Conference') - conf.save() - assert conf.field_names['submission1'] == 'poster' - assert conf.field_names['mail_subject'] == 'Presentation title' - - def test_conference_valid_submissions(self): - conf = ConferenceFactory(endpoint='Hamburgers', name='Hamburger conference') - conf.save() - - # 3 good nodes added - create_fake_conference_nodes(3, conf) - - # Deleted node added - deleted_node = ProjectFactory(is_public=True) - deleted_node.is_deleted = True - deleted_node.save() - conf.submissions.add(deleted_node) - - # Private node added - private_node = ProjectFactory(is_public=False) - conf.submissions.add(private_node) - - assert conf.submissions.count() == 5 - assert conf.valid_submissions.count() == 3 - - -class TestConferenceIntegration(ContextTestCase): - - @mock.patch('website.conferences.views.send_mail') - @mock.patch('website.conferences.utils.upload_attachments') - def test_integration(self, mock_upload, mock_send_mail): - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert mock_upload.called - 
users = OSFUser.objects.filter(username=username) - assert users.count() == 1 - nodes = AbstractNode.objects.filter(title=title) - assert nodes.count() == 1 - node = nodes[0] - assert WikiVersion.objects.get_for_node(node, 'home').content == body - assert mock_send_mail.called - call_args, call_kwargs = mock_send_mail.call_args - assert_absolute(call_kwargs['conf_view_url']) - assert_absolute(call_kwargs['set_password_url']) - assert_absolute(call_kwargs['profile_url']) - assert_absolute(call_kwargs['file_url']) - assert_absolute(call_kwargs['node_url']) - - @mock.patch('website.conferences.views.send_mail') - def test_integration_inactive(self, mock_send_mail): - conference = ConferenceFactory(active=False) - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - body = 'dragon on my back' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - res = self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - }, - ) - assert res.status_code == 406 - call_args, call_kwargs = mock_send_mail.call_args - assert call_args == (username, views.CONFERENCE_INACTIVE) - assert call_kwargs['fullname'] == fullname - assert_equal_urls( - call_kwargs['presentations_url'], - web_url_for('conference_view', _absolute=True), - ) - - @mock.patch('website.conferences.views.send_mail') - @mock.patch('website.conferences.utils.upload_attachments') - def test_integration_wo_full_name(self, mock_upload, mock_send_mail): - username = 'no_full_name@mail.com' - title = 'no full name only email' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': username, - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert mock_upload.called - users = OSFUser.objects.filter(username=username) - assert users.count() == 1 - nodes = AbstractNode.objects.filter(title=title) - assert nodes.count() == 1 - node = nodes[0] - assert WikiVersion.objects.get_for_node(node, 'home').content == body - assert mock_send_mail.called - call_args, call_kwargs = mock_send_mail.call_args - assert_absolute(call_kwargs['conf_view_url']) - assert_absolute(call_kwargs['set_password_url']) - assert_absolute(call_kwargs['profile_url']) - assert_absolute(call_kwargs['file_url']) - assert_absolute(call_kwargs['node_url']) - - @mock.patch('website.conferences.views.send_mail') - @mock.patch('website.conferences.utils.upload_attachments') - def test_create_conference_node_with_same_name_as_existing_node(self, mock_upload, mock_send_mail): - conference = ConferenceFactory() - user = UserFactory() - title = 'Long Live Greg' 
- ProjectFactory(creator=user, title=title) - - body = 'Greg is a good plant' - content = 'Long may they reign.' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{user.fullname} <{user.username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1':(BytesIO(content.encode()), 'attachment-1') - }, - ) - - assert AbstractNode.objects.filter(title=title, creator=user).count() == 2 - assert mock_upload.called - assert mock_send_mail.called - - - @mock.patch('website.conferences.views.send_mail') - def test_conferences_discontinued(self, mock_send_mail): - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - from waffle.testutils import override_flag - from osf import features - with override_flag(features.DISABLE_MEETINGS, active=True): - res = self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert res.status_code == 501 - assert res.json['message_short'] == 'Service has been discontinued' - - assert mock_send_mail.called diff --git a/tests/test_events.py b/tests/test_events.py index 866bf6ec337..c9e30273b49 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -131,7 +131,7 @@ def setUp(self): self.user_2 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) # subscription - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + 'file_updated', owner=self.project, event_name='file_updated', @@ -157,7 +157,7 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( + self.project_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' @@ -184,7 +184,7 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( + self.project_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' @@ -219,7 +219,7 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - 
self.project_subscription = factories.NotificationSubscriptionFactory( + self.project_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' @@ -249,7 +249,7 @@ def setUp(self): self.user_2 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) # subscription - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + 'file_updated', owner=self.project, event_name='file_updated', @@ -303,21 +303,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id @@ -398,21 +398,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id @@ -480,21 +480,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index 35bccc88119..814ab0556f1 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -29,7 +29,6 @@ AuthUserFactory, CommentFactory, NodeFactory, - OSFGroupFactory, PreprintFactory, PreprintProviderFactory, PrivateLinkFactory, @@ -50,6 +49,7 @@ from website.project.views.node import _should_show_wiki_widget from website.util import web_url_for from website.util import rubeus +from conftest import start_mock_send_grid 
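# Editor's note: throughout this diff, exact send_mail.assert_called_with(...)
# checks are replaced by a looser assert_called()/call_count check plus targeted
# inspection of the keyword arguments passed to the mocked sender (for example
# call_args_list[0][1]['to_addr'] earlier in tests/test_auth_views.py). A
# standalone illustration of that inspection pattern, using only unittest.mock:
from unittest import mock

sender = mock.Mock()
sender(to_addr='referrer@example.com', subject='invite')
sender(to_addr='claimer@example.com', subject='invite')

assert sender.call_count == 2
# Each entry in call_args_list is an (args, kwargs) pair; index [1] is the kwargs dict.
assert sender.call_args_list[0][1]['to_addr'] == 'referrer@example.com'
assert sender.call_args_list[1][1]['to_addr'] == 'claimer@example.com'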
pytestmark = pytest.mark.django_db @@ -193,13 +193,6 @@ def test_check_can_access_valid(self): self.project.save() assert check_can_access(self.project, contributor) - def test_check_can_access_osf_group_member_valid(self): - user = AuthUserFactory() - group = OSFGroupFactory(creator=user) - self.project.add_osf_group(group, permissions.READ) - self.project.save() - assert check_can_access(self.project, user) - def test_check_user_access_invalid(self): noncontrib = AuthUserFactory() with pytest.raises(HTTPError): @@ -368,6 +361,8 @@ def test_explore(self): assert res.status_code == 200 +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestExternalAuthViews(OsfTestCase): def setUp(self): @@ -389,6 +384,8 @@ def setUp(self): self.user.save() self.auth = (self.user.username, password) + self.mock_send_grid = start_mock_send_grid(self) + def test_external_login_email_get_with_invalid_session(self): url = web_url_for('external_login_email_get') resp = self.app.get(url) @@ -408,8 +405,7 @@ def test_external_login_confirm_email_get_without_destination(self): res = self.app.get(url, auth=self.auth) assert res.status_code == 400, 'bad request' - @mock.patch('website.mails.send_mail') - def test_external_login_confirm_email_get_create(self, mock_welcome): + def test_external_login_confirm_email_get_create(self): # TODO: check in qa url encoding assert not self.user.is_registered url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') @@ -418,15 +414,14 @@ def test_external_login_confirm_email_get_create(self, mock_welcome): assert '/login?service=' in res.location assert quote_plus('new=true') in res.location - assert mock_welcome.call_count == 0 + assert self.mock_send_grid.call_count == 0 self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.send_mail') - def test_external_login_confirm_email_get_link(self, mock_link_confirm): + def test_external_login_confirm_email_get_link(self): self.user.external_identity['orcid'][self.provider_id] = 'LINK' self.user.save() assert not self.user.is_registered @@ -437,15 +432,14 @@ def test_external_login_confirm_email_get_link(self, mock_link_confirm): assert '/login?service=' in res.location assert 'new=true' not in parse.unquote(res.location) - assert mock_link_confirm.call_count == 1 + assert self.mock_send_grid.call_count == 1 self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.send_mail') - def test_external_login_confirm_email_get_duped_id(self, mock_confirm): + def test_external_login_confirm_email_get_duped_id(self): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'CREATE'}}) assert dupe_user.external_identity == self.user.external_identity url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') @@ -454,7 +448,7 @@ def test_external_login_confirm_email_get_duped_id(self, mock_confirm): assert 'You should be redirected automatically' in str(res.html) assert '/login?service=' in res.location - assert mock_confirm.call_count == 0 + assert self.mock_send_grid.call_count == 0 self.user.reload() dupe_user.reload() @@ -462,14 +456,13 @@ def 
test_external_login_confirm_email_get_duped_id(self, mock_confirm): assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert dupe_user.external_identity == {} - @mock.patch('website.mails.send_mail') - def test_external_login_confirm_email_get_duping_id(self, mock_confirm): + def test_external_login_confirm_email_get_duping_id(self): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'VERIFIED'}}) url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') res = self.app.get(url) assert res.status_code == 403, 'only allows one user to link an id' - assert mock_confirm.call_count == 0 + assert self.mock_send_grid.call_count == 0 self.user.reload() dupe_user.reload() @@ -632,17 +625,6 @@ def test_show_wiki_is_false_for_read_contributors_when_no_wiki_or_content(self): def test_show_wiki_is_false_for_noncontributors_when_no_wiki_or_content(self): assert not _should_show_wiki_widget(self.project, None) - def test_show_wiki_for_osf_group_members(self): - group = OSFGroupFactory(creator=self.noncontributor) - self.project.add_osf_group(group, permissions.READ) - assert not _should_show_wiki_widget(self.project, self.noncontributor) - assert not _should_show_wiki_widget(self.project2, self.noncontributor) - - self.project.remove_osf_group(group) - self.project.add_osf_group(group, permissions.WRITE) - assert _should_show_wiki_widget(self.project, self.noncontributor) - assert not _should_show_wiki_widget(self.project2, self.noncontributor) - class TestUnconfirmedUserViews(OsfTestCase): diff --git a/tests/test_notifications.py b/tests/test_notifications.py deleted file mode 100644 index b52190ca999..00000000000 --- a/tests/test_notifications.py +++ /dev/null @@ -1,2031 +0,0 @@ -import collections -from unittest import mock - -import pytest -from babel import dates, Locale -from schema import Schema, And, Use, Or -from django.utils import timezone - -from framework.auth import Auth -from osf.models import Comment, NotificationDigest, NotificationSubscription, Guid, OSFUser - -from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications import constants -from website.notifications import emails -from website.notifications import utils -from website import mails -from website.profile.utils import get_profile_image_url -from website.project.signals import contributor_removed, node_deleted -from website.reviews import listeners -from website.util import api_url_for -from website.util import web_url_for -from website import settings - -from osf_tests import factories -from osf.utils import permissions -from tests.base import capture_signals -from tests.base import OsfTestCase, NotificationTestCase - - - -class TestNotificationsModels(OsfTestCase): - - def setUp(self): - super().setUp() - # Create project with component - self.user = factories.UserFactory() - self.consolidate_auth = Auth(user=self.user) - self.parent = factories.ProjectFactory(creator=self.user) - self.node = factories.NodeFactory(creator=self.user, parent=self.parent) - - def test_has_permission_on_children(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - - node = factories.NodeFactory(parent=parent, category='project') - sub_component = 
factories.NodeFactory(parent=node) - sub_component.add_contributor(contributor=non_admin_user) - sub_component.save() - sub_component2 = factories.NodeFactory(parent=node) - - assert node.has_permission_on_children(non_admin_user, permissions.READ) - - def test_check_user_has_permission_excludes_deleted_components(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - sub_component.add_contributor(contributor=non_admin_user) - sub_component.is_deleted = True - sub_component.save() - sub_component2 = factories.NodeFactory(parent=node) - - assert not node.has_permission_on_children(non_admin_user, permissions.READ) - - def test_check_user_does_not_have_permission_on_private_node_child(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - - assert not node.has_permission_on_children(non_admin_user,permissions.READ) - - def test_check_user_child_node_permissions_false_if_no_children(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - node = factories.NodeFactory(parent=parent, category='project') - - assert not node.has_permission_on_children(non_admin_user,permissions.READ) - - def test_check_admin_has_permissions_on_private_component(self): - parent = factories.ProjectFactory() - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - - assert node.has_permission_on_children(parent.creator,permissions.READ) - - def test_check_user_private_node_child_permissions_excludes_pointers(self): - user = factories.UserFactory() - parent = factories.ProjectFactory() - pointed = factories.ProjectFactory(creator=user) - parent.add_pointer(pointed, Auth(parent.creator)) - parent.save() - - assert not parent.has_permission_on_children(user,permissions.READ) - - def test_new_project_creator_is_subscribed(self): - user = factories.UserFactory() - factories.ProjectFactory(creator=user) - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 2 # subscribed to both file_updated and comments - assert 'file_updated' in event_types - assert 'comments' in event_types - - def test_new_node_creator_is_not_subscribed(self): - user = factories.UserFactory() - factories.NodeFactory(creator=user) - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - - assert len(user_subscriptions) == 0 - - def test_new_project_creator_is_subscribed_with_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'none') - - factories.NotificationSubscriptionFactory( 
- _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_digest') - - node = factories.ProjectFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - - assert len(user_subscriptions) == 5 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_mentions' in event_types - assert file_updated_subscription.none.count() == 1 - assert file_updated_subscription.email_transactional.count() == 0 - assert comments_subscription.email_digest.count() == 1 - assert comments_subscription.email_transactional.count() == 0 - - def test_new_node_creator_is_not_subscribed_with_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'none') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.NodeFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 4 # subscribed to only user settings - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types - - def test_subscribe_user_to_global_notfiications(self): - user = factories.UserFactory() - utils.subscribe_user_to_global_notifications(user) - subscription_event_names = list(user.notification_subscriptions.values_list('event_name', flat=True)) - for event_name in constants.USER_SUBSCRIPTIONS_AVAILABLE: - assert event_name in subscription_event_names - - def test_subscribe_user_to_registration_notifications(self): - registration = factories.RegistrationFactory() - with pytest.raises(InvalidSubscriptionError): - utils.subscribe_user_to_notifications(registration, self.user) - - def test_new_project_creator_is_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - 
event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.ProjectFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - - assert len(user_subscriptions) == 6 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types - assert file_updated_subscription.email_transactional.count() == 1 - assert comments_subscription.email_transactional.count() == 1 - - def test_new_fork_creator_is_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - project = factories.ProjectFactory(creator=user) - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.ForkFactory(project=project) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - node_file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - node_comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - project_file_updated_subscription = NotificationSubscription.objects.get(_id=project._id + '_file_updated') - project_comments_subscription = NotificationSubscription.objects.get(_id=project._id + '_comments') - - assert len(user_subscriptions) == 7 # subscribed to project, fork, and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_mentions' in event_types - assert node_file_updated_subscription.email_transactional.count() == 1 - assert node_comments_subscription.email_transactional.count() == 1 - assert project_file_updated_subscription.email_transactional.count() == 1 - assert project_comments_subscription.email_transactional.count() == 1 - - def test_new_node_creator_is_not_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 
'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.NodeFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 4 # subscribed to only user settings - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types - - - def test_contributor_subscribed_when_added_to_project(self): - user = factories.UserFactory() - contributor = factories.UserFactory() - project = factories.ProjectFactory(creator=user) - project.add_contributor(contributor=contributor) - contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) - event_types = [sub.event_name for sub in contributor_subscriptions] - - assert len(contributor_subscriptions) == 2 - assert 'file_updated' in event_types - assert 'comments' in event_types - - def test_contributor_subscribed_when_added_to_component(self): - user = factories.UserFactory() - contributor = factories.UserFactory() - - factories.NotificationSubscriptionFactory( - _id=contributor._id + '_' + 'global_comments', - user=contributor, - event_name='global_comments' - ).add_user_to_subscription(contributor, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=contributor._id + '_' + 'global_file_updated', - user=contributor, - event_name='global_file_updated' - ).add_user_to_subscription(contributor, 'email_transactional') - - node = factories.NodeFactory(creator=user) - node.add_contributor(contributor=contributor) - - contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) - event_types = [sub.event_name for sub in contributor_subscriptions] - - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - - assert len(contributor_subscriptions) == 4 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'comments' in event_types - assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert file_updated_subscription.email_transactional.count() == 1 - assert comments_subscription.email_transactional.count() == 1 - - def test_unregistered_contributor_not_subscribed_when_added_to_project(self): - user = factories.AuthUserFactory() - unregistered_contributor = factories.UnregUserFactory() - project = factories.ProjectFactory(creator=user) - project.add_unregistered_contributor( - unregistered_contributor.fullname, - unregistered_contributor.email, - Auth(user), - existing_user=unregistered_contributor - ) - - contributor_subscriptions = list(utils.get_all_user_subscriptions(unregistered_contributor)) - assert len(contributor_subscriptions) == 0 - - -class TestSubscriptionView(OsfTestCase): - - def setUp(self): - super().setUp() - self.node = factories.NodeFactory() - self.user = self.node.creator - self.registration = factories.RegistrationFactory(creator=self.user) - - def 
test_create_new_subscription(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) - - # check that user was added to notification_type field - assert payload['id'] == s.owner._id - assert payload['event'] == s.event_name - assert self.node.creator in getattr(s, payload['notification_type']).all() - - # change subscription - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - assert not self.node.creator in getattr(s, payload['notification_type']).all() - assert self.node.creator in getattr(s, new_payload['notification_type']).all() - - def test_cannot_create_registration_subscription(self): - payload = { - 'id': self.registration._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - res = self.app.post(url, json=payload, auth=self.registration.creator.auth) - assert res.status_code == 400 - - def test_adopt_parent_subscription_default(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - event_id = self.node._id + '_' + 'comments' - # confirm subscription was created because parent had default subscription - s = NotificationSubscription.objects.filter(_id=event_id).count() - assert 0 == s - - def test_change_subscription_to_adopt_parent_subscription_removes_user(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) - - # change subscription to adopt_parent - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - - # assert that user is removed from the subscription entirely - for n in constants.NOTIFICATION_TYPES: - assert not self.node.creator in getattr(s, n).all() - - def test_configure_subscription_adds_node_id_to_notifications_configured(self): - project = factories.ProjectFactory(creator=self.user) - assert not project._id in self.user.notifications_configured - payload = { - 'id': project._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=project.creator.auth) - - self.user.reload() - - assert project._id in self.user.notifications_configured - - -class TestRemoveContributor(OsfTestCase): - - def setUp(self): - super(OsfTestCase, self).setUp() - self.project = factories.ProjectFactory() - self.contributor = factories.UserFactory() - self.project.add_contributor(contributor=self.contributor, permissions=permissions.READ) - self.project.save() - - self.subscription = 
NotificationSubscription.objects.get( - node=self.project, - _id=self.project._id + '_comments' - ) - - self.node = factories.NodeFactory(parent=self.project) - self.node.add_contributor(contributor=self.project.creator, permissions=permissions.ADMIN) - self.node.save() - - self.node_subscription = NotificationSubscription.objects.get( - _id=self.node._id + '_comments', - node=self.node - ) - self.node_subscription.add_user_to_subscription(self.node.creator, 'email_transactional') - - def test_removed_non_admin_contributor_is_removed_from_subscriptions(self): - assert self.contributor in self.subscription.email_transactional.all() - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert self.contributor not in self.project.contributors.all() - self.subscription.reload() - assert self.contributor not in self.subscription.email_transactional.all() - - def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self): - assert self.node.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator)) - assert self.node.creator not in self.node.contributors.all() - self.node_subscription.reload() - assert self.node.creator not in self.node_subscription.email_transactional.all() - - def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self): - # Admin on parent project is removed as a contributor on a component. Check - # that admin is not removed from component subscriptions, as the admin - # now has read-only access. - assert self.project.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator)) - assert self.project.creator not in self.node.contributors.all() - assert self.project.creator in self.node_subscription.email_transactional.all() - - def test_remove_contributor_signal_called_when_contributor_is_removed(self): - with capture_signals() as mock_signals: - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert mock_signals.signals_sent() == {contributor_removed} - - -class TestRemoveNodeSignal(OsfTestCase): - - def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): - project = factories.ProjectFactory() - component = factories.NodeFactory(parent=project, creator=project.creator) - - s = NotificationSubscription.objects.filter(email_transactional=project.creator) - assert s.count() == 2 - - s = NotificationSubscription.objects.filter(email_transactional=component.creator) - assert s.count() == 2 - - with capture_signals() as mock_signals: - project.remove_node(auth=Auth(project.creator)) - project.reload() - component.reload() - - assert project.is_deleted - assert component.is_deleted - assert mock_signals.signals_sent() == {node_deleted} - - s = NotificationSubscription.objects.filter(email_transactional=project.creator) - assert s.count() == 0 - - s = NotificationSubscription.objects.filter(email_transactional=component.creator) - assert s.count() == 0 - - with pytest.raises(NotificationSubscription.DoesNotExist): - NotificationSubscription.objects.get(node=project) - - with pytest.raises(NotificationSubscription.DoesNotExist): - NotificationSubscription.objects.get(node=component) - - -def list_or_dict(data): - # Generator only returns lists or dicts from list or dict - if isinstance(data, dict): - for key in data: - if isinstance(data[key], dict) or isinstance(data[key], list): - yield 
data[key] - elif isinstance(data, list): - for item in data: - if isinstance(item, dict) or isinstance(item, list): - yield item - - -def has(data, sub_data): - # Recursive approach to look for a subset of data in data. - # WARNING: Don't use on huge structures - # :param data: Data structure - # :param sub_data: subset being checked for - # :return: True or False - try: - next(item for item in data if item == sub_data) - return True - except StopIteration: - lists_and_dicts = list_or_dict(data) - for item in lists_and_dicts: - if has(item, sub_data): - return True - return False - - -def subscription_schema(project, structure, level=0): - # builds a schema from a list of nodes and events - # :param project: validation type - # :param structure: list of nodes (another list) and events - # :return: schema - sub_list = [] - for item in list_or_dict(structure): - sub_list.append(subscription_schema(project, item, level=level+1)) - sub_list.append(event_schema(level)) - - node_schema = { - 'node': { - 'id': Use(type(project._id), error=f'node_id{level}'), - 'title': Use(type(project.title), error=f'node_title{level}'), - 'url': Use(type(project.url), error=f'node_{level}') - }, - 'kind': And(str, Use(lambda s: s in ('node', 'folder'), - error=f"kind didn't match node or folder {level}")), - 'nodeType': Use(lambda s: s in ('project', 'component'), error='nodeType not project or component'), - 'category': Use(lambda s: s in settings.NODE_CATEGORY_MAP, error='category not in settings.NODE_CATEGORY_MAP'), - 'permissions': { - 'view': Use(lambda s: s in (True, False), error='view permissions is not True/False') - }, - 'children': sub_list - } - if level == 0: - return Schema([node_schema]) - return node_schema - - -def event_schema(level=None): - return { - 'event': { - 'title': And(Use(str, error=f'event_title{level} not a string'), - Use(lambda s: s in constants.NOTIFICATION_TYPES, - error=f'event_title{level} not in list')), - 'description': And(Use(str, error=f'event_desc{level} not a string'), - Use(lambda s: s in constants.NODE_SUBSCRIPTIONS_AVAILABLE, - error=f'event_desc{level} not in list')), - 'notificationType': And(str, Or('adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)), - 'parent_notification_type': Or(None, 'adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES) - }, - 'kind': 'event', - 'children': And(list, lambda l: len(l) == 0) - } - - -class TestNotificationUtils(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = factories.UserFactory() - self.project = factories.ProjectFactory(creator=self.user) - - self.project_subscription = NotificationSubscription.objects.get( - node=self.project, - _id=self.project._id + '_comments', - event_name='comments' - ) - - self.user.notifications_configured[self.project._id] = True - self.user.save() - - self.node = factories.NodeFactory(parent=self.project, creator=self.user) - - self.node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=self.node._id + '_' + 'comments', - node=self.node, - event_name='comments' - ) - self.node_comments_subscription.save() - self.node_comments_subscription.email_transactional.add(self.user) - self.node_comments_subscription.save() - - self.node_subscription = list(NotificationSubscription.objects.filter(node=self.node)) - - self.user_subscription = [factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'comment_replies', - user=self.user, - event_name='comment_replies' - ), - factories.NotificationSubscriptionFactory( - _id=self.user._id + 
'_' + 'global_comment', - user=self.user, - event_name='global_comment' - ), - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_file_updated', - user=self.user, - event_name='global_file_updated' - )] - - for x in self.user_subscription: - x.save() - for x in self.user_subscription: - x.email_transactional.add(self.user) - for x in self.user_subscription: - x.save() - - def test_to_subscription_key(self): - key = utils.to_subscription_key('xyz', 'comments') - assert key == 'xyz_comments' - - def test_from_subscription_key(self): - parsed_key = utils.from_subscription_key('xyz_comment_replies') - assert parsed_key == { - 'uid': 'xyz', - 'event': 'comment_replies' - } - - def test_get_all_user_subscriptions(self): - user_subscriptions = list(utils.get_all_user_subscriptions(self.user)) - assert self.project_subscription in user_subscriptions - assert self.node_comments_subscription in user_subscriptions - for x in self.user_subscription: - assert x in user_subscriptions - assert len(user_subscriptions) == 6 - - def test_get_all_node_subscriptions_given_user_subscriptions(self): - user_subscriptions = utils.get_all_user_subscriptions(self.user) - node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node, - user_subscriptions=user_subscriptions)] - expected_node_subscription_ids = [x._id for x in self.node_subscription] - assert node_subscription_ids == expected_node_subscription_ids - - def test_get_all_node_subscriptions_given_user_and_node(self): - node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node)] - expected_node_subscription_ids = [x._id for x in self.node_subscription] - assert node_subscription_ids == expected_node_subscription_ids - - def test_get_configured_project_ids_does_not_return_user_or_node_ids(self): - configured_nodes = utils.get_configured_projects(self.user) - configured_ids = [n._id for n in configured_nodes] - # No duplicates! 
- assert len(configured_nodes) == 1 - - assert self.project._id in configured_ids - assert self.node._id not in configured_ids - assert self.user._id not in configured_ids - - def test_get_configured_project_ids_excludes_deleted_projects(self): - project = factories.ProjectFactory() - project.is_deleted = True - project.save() - assert project not in utils.get_configured_projects(self.user) - - def test_get_configured_project_ids_excludes_node_with_project_category(self): - node = factories.NodeFactory(parent=self.project, category='project') - assert node not in utils.get_configured_projects(self.user) - - def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - assert private_project in configured_project_nodes - - def test_get_configured_project_ids_excludes_private_projects_if_no_subscriptions_on_node(self): - user = factories.UserFactory() - - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - node.add_contributor(user) - - utils.remove_contributor_from_subscriptions(node, user) - - configured_project_nodes = utils.get_configured_projects(user) - assert private_project not in configured_project_nodes - - def test_get_parent_notification_type(self): - nt = utils.get_parent_notification_type(self.node, 'comments', self.user) - assert nt == 'email_transactional' - - def test_get_parent_notification_type_no_parent_subscriptions(self): - node = factories.NodeFactory() - nt = utils.get_parent_notification_type(node._id, 'comments', self.user) - assert nt is None - - def test_get_parent_notification_type_no_parent(self): - project = factories.ProjectFactory() - nt = utils.get_parent_notification_type(project._id, 'comments', self.user) - assert nt is None - - def test_get_parent_notification_type_handles_user_id(self): - nt = utils.get_parent_notification_type(self.user._id, 'comments', self.user) - assert nt is None - - def test_format_data_project_settings(self): - data = utils.format_data(self.user, [self.project]) - parent_event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - child_event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - expected_new = [['event'], 'event'] - schema = subscription_schema(self.project, expected_new) - assert schema.validate(data) - assert has(data, parent_event) - assert has(data, child_event) - - def test_format_data_node_settings(self): - data = utils.format_data(self.user, [self.node]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 
'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_includes_admin_view_only_component_subscriptions(self): - # Test private components in which parent project admins are not contributors still appear in their - # notifications settings. - node = factories.NodeFactory(parent=self.project) - data = utils.format_data(self.user, [self.project]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event'], ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_excludes_pointers(self): - project = factories.ProjectFactory() - pointed = factories.ProjectFactory() - project.add_pointer(pointed, Auth(project.creator)) - project.creator.notifications_configured[project._id] = True - project.creator.save() - configured_project_nodes = utils.get_configured_projects(project.creator) - data = utils.format_data(project.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_if_children_points_to_parent(self): - private_project = factories.ProjectFactory(creator=self.user) - node = factories.NodeFactory(parent=private_project, creator=self.user) - node.save() - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 
'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_user_subscriptions(self): - data = utils.format_user_subscriptions(self.user) - expected = [ - { - 'event': { - 'title': 'global_file_updated', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_file_updated'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None, - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_comment_replies', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_comments', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_mentions', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_mentions'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_reviews', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_reviews'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - ] - - assert data == expected - - def test_get_global_notification_type(self): - notification_type = utils.get_global_notification_type(self.user_subscription[1] ,self.user) - assert 'email_transactional' == notification_type - - def test_check_if_all_global_subscriptions_are_none_false(self): - all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user) - assert not all_global_subscriptions_none - - # # Business logic prevents this from being an applicable unit test; - # # global_mentions cannot be unsubscribed from - # def test_check_if_all_global_subscriptions_are_none_true(self): - # for x in self.user_subscription: - # x.none.add(self.user) - # x.email_transactional.remove(self.user) - # for x in self.user_subscription: - # x.save() - # all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user) - # assert all_global_subscriptions_none - - def test_format_data_user_settings(self): - data = utils.format_user_and_project_subscriptions(self.user) - expected = [ - { - 'node': { - 'id': self.user._id, - 'title': 'Default Notification Settings', - 'help': 'These are default settings for new projects you create or are added to. Modifying these settings will not modify settings on existing projects.' - }, - 'kind': 'heading', - 'children': utils.format_user_subscriptions(self.user) - }, - { - 'node': { - 'help': 'These are settings for each of your projects. 
Modifying these settings will only modify the settings for the selected project.', - 'id': '', - 'title': 'Project Notifications' - }, - 'kind': 'heading', - 'children': utils.format_data(self.user, utils.get_configured_projects(self.user)) - }] - assert data == expected - - def test_serialize_user_level_event(self): - user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)] - user_subscription = None - for subscription in user_subscriptions: - if 'global_comment_replies' in getattr(subscription, 'event_name'): - user_subscription = subscription - data = utils.serialize_event(self.user, event_description='global_comment_replies', - subscription=user_subscription) - expected = { - 'event': { - 'title': 'global_comment_replies', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - assert data == expected - - def test_serialize_node_level_event(self): - node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)] - data = utils.serialize_event(user=self.user, event_description='comments', - subscription=node_subscriptions[0], node=self.node) - expected = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - assert data == expected - - def test_serialize_node_level_event_that_adopts_parent_settings(self): - user = factories.UserFactory() - self.project.add_contributor(contributor=user, permissions=permissions.READ) - self.project.save() - self.node.add_contributor(contributor=user, permissions=permissions.READ) - self.node.save() - - # set up how it was in original test - remove existing subscriptions - node_subscriptions = utils.get_all_node_subscriptions(user, self.node) - for subscription in node_subscriptions: - subscription.remove_user_from_subscription(user) - - node_subscriptions = utils.get_all_node_subscriptions(user, self.node) - data = utils.serialize_event(user=user, event_description='comments', - subscription=node_subscriptions, node=self.node) - expected = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - assert data == expected - - -class TestNotificationsDict(OsfTestCase): - def test_notifications_dict_add_message_returns_proper_format(self): - d = utils.NotificationsDict() - message = { - 'message': 'Freddie commented on your project', - 'timestamp': timezone.now() - } - message2 = { - 'message': 'Mercury commented on your component', - 'timestamp': timezone.now() - } - - d.add_message(['project'], message) - d.add_message(['project', 'node'], message2) - - expected = { - 'messages': [], - 'children': collections.defaultdict( - utils.NotificationsDict, { - 'project': { - 'messages': [message], - 'children': collections.defaultdict(utils.NotificationsDict, { - 'node': { - 'messages': [message2], - 'children': collections.defaultdict(utils.NotificationsDict, {}) - } - }) - } - } - )} - assert d == expected - - -class TestCompileSubscriptions(NotificationTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.UserFactory() - self.user_2 = factories.UserFactory() - 
self.user_3 = factories.UserFactory() - self.user_4 = factories.UserFactory() - # Base project + 1 project shared with 3 + 1 project shared with 2 - self.base_project = factories.ProjectFactory(is_public=False, creator=self.user_1) - self.shared_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) - self.private_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) - # Adding contributors - for node in [self.base_project, self.shared_node, self.private_node]: - node.add_contributor(self.user_2, permissions=permissions.ADMIN) - self.base_project.add_contributor(self.user_3, permissions=permissions.WRITE) - self.shared_node.add_contributor(self.user_3, permissions=permissions.WRITE) - # Setting basic subscriptions - self.base_sub = factories.NotificationSubscriptionFactory( - _id=self.base_project._id + '_file_updated', - node=self.base_project, - event_name='file_updated' - ) - self.base_sub.save() - self.shared_sub = factories.NotificationSubscriptionFactory( - _id=self.shared_node._id + '_file_updated', - node=self.shared_node, - event_name='file_updated' - ) - self.shared_sub.save() - self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - node=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - - def test_no_subscription(self): - node = factories.NodeFactory() - result = emails.compile_subscriptions(node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_no_subscribers(self): - node = factories.NodeFactory() - node_sub = factories.NotificationSubscriptionFactory( - _id=node._id + '_file_updated', - node=node, - event_name='file_updated' - ) - node_sub.save() - result = emails.compile_subscriptions(node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_parent(self): - # Basic sub check - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - result = emails.compile_subscriptions(self.base_project, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_to_parent_from_child(self): - # checks the parent sub is the one to appear without a child sub - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_to_both_from_child(self): - # checks that only one sub is in the list. 
- self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - self.shared_sub.email_transactional.add(self.user_1) - self.shared_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_diff_subs_to_both_from_child(self): - # Check that the child node sub overrides the parent node sub - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - self.shared_sub.none.add(self.user_1) - self.shared_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [], 'none': [self.user_1._id], 'email_digest': []} == result - - def test_user_wo_permission_on_child_node_not_listed(self): - # Tests to see if a user without permission gets an Email about a node they cannot see. - self.base_sub.email_transactional.add(self.user_3) - self.base_sub.save() - result = emails.compile_subscriptions(self.private_node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_several_nodes_deep(self): - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - node2 = factories.NodeFactory(parent=self.shared_node) - node3 = factories.NodeFactory(parent=node2) - node4 = factories.NodeFactory(parent=node3) - node5 = factories.NodeFactory(parent=node4) - subs = emails.compile_subscriptions(node5, 'file_updated') - assert subs == {'email_transactional': [self.user_1._id], 'email_digest': [], 'none': []} - - def test_several_nodes_deep_precedence(self): - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - node2 = factories.NodeFactory(parent=self.shared_node) - node3 = factories.NodeFactory(parent=node2) - node4 = factories.NodeFactory(parent=node3) - node4_subscription = factories.NotificationSubscriptionFactory( - _id=node4._id + '_file_updated', - node=node4, - event_name='file_updated' - ) - node4_subscription.save() - node4_subscription.email_digest.add(self.user_1) - node4_subscription.save() - node5 = factories.NodeFactory(parent=node4) - subs = emails.compile_subscriptions(node5, 'file_updated') - assert subs == {'email_transactional': [], 'email_digest': [self.user_1._id], 'none': []} - - -class TestMoveSubscription(NotificationTestCase): - def setUp(self): - super().setUp() - self.blank = {key: [] for key in constants.NOTIFICATION_TYPES} # For use where it is blank. 
- self.user_1 = factories.AuthUserFactory() - self.auth = Auth(user=self.user_1) - self.user_2 = factories.AuthUserFactory() - self.user_3 = factories.AuthUserFactory() - self.user_4 = factories.AuthUserFactory() - self.project = factories.ProjectFactory(creator=self.user_1) - self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1) - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - node=self.project, - event_name='file_updated' - ) - self.sub.email_transactional.add(self.user_1) - self.sub.save() - self.file_sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_xyz42_file_updated', - node=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() - - def test_separate_users(self): - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - subbed, removed = utils.separate_users( - self.private_node, [self.user_2._id, self.user_3._id, self.user_4._id] - ) - assert [self.user_2._id, self.user_3._id] == subbed - assert [self.user_4._id] == removed - - def test_event_subs_same(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []} == results - - def test_event_nodes_same(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.project) - assert {'email_transactional': [], 'email_digest': [], 'none': []} == results - - def test_move_sub(self): - # Tests old sub is replaced with new sub. - utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - self.file_sub.reload() - assert 'abc42_file_updated' == self.file_sub.event_name - assert self.private_node == self.file_sub.owner - assert self.private_node._id + '_abc42_file_updated' == self.file_sub._id - - def test_move_sub_with_none(self): - # Attempt to reproduce an error that is seen when moving files - self.project.add_contributor(self.user_2, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.file_sub.none.add(self.user_2) - self.file_sub.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [], 'email_digest': [], 'none': [self.user_2._id]} == results - - def test_remove_one_user(self): - # One user doesn't have permissions on the node the sub is moved to. Should be listed. 
-        self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4)
-        self.file_sub.save()
-        self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth)
-        self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth)
-        self.private_node.save()
-        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
-        assert {'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []} == results
-
-    def test_remove_one_user_warn_another(self):
-        # Two users do not have permissions on new node, but one has a project sub. Both should be listed.
-        self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth)
-        self.private_node.save()
-        self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth)
-        self.project.save()
-        self.sub.email_digest.add(self.user_3)
-        self.sub.save()
-        self.file_sub.email_transactional.add(self.user_2, self.user_4)
-
-        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
-        utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
-        assert {'email_transactional': [self.user_4._id], 'email_digest': [self.user_3._id], 'none': []} == results
-        assert self.sub.email_digest.filter(id=self.user_3.id).exists() # Is not removed from the project subscription.
-
-    def test_warn_user(self):
-        # One user with a project sub does not have permission on new node. User should be listed.
-        self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth)
-        self.private_node.save()
-        self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth)
-        self.project.save()
-        self.sub.email_digest.add(self.user_3)
-        self.sub.save()
-        self.file_sub.email_transactional.add(self.user_2)
-        results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
-        utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
-        assert {'email_transactional': [], 'email_digest': [self.user_3._id], 'none': []} == results
-        assert self.user_3 in self.sub.email_digest.all() # Is not removed from the project subscription.
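# Illustrative sketch (not taken from the diff itself): the two-step flow that the
# TestMoveSubscription cases above exercise when a file-event subscription follows a
# file to another node. It assumes the `website.notifications.utils` module these tests
# drive keeps the call signatures shown above; `source`, `target` and the
# '<guid>_file_updated' event keys are hypothetical stand-ins.
from website.notifications import utils

def move_file_updated_subscription(source, target):
    # Users, grouped by notification type, who would lose the event because they lack
    # permission on `target`; this is the dict the asserts above compare against.
    affected = utils.users_to_remove('xyz42_file_updated', source, target)
    # Re-key the subscription onto `target` under its new event name and detach the
    # affected users from it.
    utils.move_subscription(affected, 'xyz42_file_updated', source,
                            'abc42_file_updated', target)
    return affected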
-
-    def test_user_node_subbed_and_not_removed(self):
-        self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth)
-        self.project.save()
-        self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth)
-        self.private_node.save()
-        self.sub.email_digest.add(self.user_3)
-        self.sub.save()
-        utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
-        assert not self.file_sub.email_digest.filter().exists()
-
-    # Regression test for commit ea15186
-    def test_garrulous_event_name(self):
-        self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4)
-        self.file_sub.save()
-        self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth)
-        self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth)
-        self.private_node.save()
-        results = utils.users_to_remove('complicated/path_to/some/file/ASDFASDF.txt_file_updated', self.project, self.private_node)
-        assert {'email_transactional': [], 'email_digest': [], 'none': []} == results
-
-class TestSendEmails(NotificationTestCase):
-    def setUp(self):
-        super().setUp()
-        self.user = factories.AuthUserFactory()
-        self.project = factories.ProjectFactory()
-        self.project_subscription = factories.NotificationSubscriptionFactory(
-            _id=self.project._id + '_' + 'comments',
-            node=self.project,
-            event_name='comments'
-        )
-        self.project_subscription.save()
-        self.project_subscription.email_transactional.add(self.project.creator)
-        self.project_subscription.save()
-
-        self.node = factories.NodeFactory(parent=self.project)
-        self.node_subscription = factories.NotificationSubscriptionFactory(
-            _id=self.node._id + '_comments',
-            node=self.node,
-            event_name='comments'
-        )
-        self.node_subscription.save()
-        self.user_subscription = factories.NotificationSubscriptionFactory(
-            _id=self.user._id + '_' + 'global_comment_replies',
-            node=self.node,
-            event_name='global_comment_replies'
-        )
-        self.user_subscription.email_transactional.add(self.user)
-        self.user_subscription.save()
-
-    @mock.patch('website.notifications.emails.store_emails')
-    def test_notify_no_subscription(self, mock_store):
-        node = factories.ProjectFactory()
-        user = factories.AuthUserFactory()
-        emails.notify('comments', user=user, node=node, timestamp=timezone.now())
-        assert not mock_store.called
-
-    @mock.patch('website.notifications.emails.store_emails')
-    def test_notify_no_subscribers(self, mock_store):
-        node = factories.NodeFactory()
-        node_subscription = factories.NotificationSubscriptionFactory(
-            _id=node._id + '_comments',
-            node=node,
-            event_name='comments'
-        )
-        node_subscription.save()
-        emails.notify('comments', user=self.user, node=node, timestamp=timezone.now())
-        assert not mock_store.called
-
-    @mock.patch('website.notifications.emails.store_emails')
-    def test_notify_sends_with_correct_args(self, mock_store):
-        time_now = timezone.now()
-        emails.notify('comments', user=self.user, node=self.node, timestamp=time_now)
-        assert mock_store.called
-        mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', self.user,
-                                      self.node, time_now)
-
-    @mock.patch('website.notifications.emails.store_emails')
-    def test_notify_does_not_send_to_exclude(self, mock_store):
-        time_now = timezone.now()
-        context = {'exclude':[self.project.creator._id]}
-        emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, **context)
-        assert mock_store.call_count == 0
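# Illustrative sketch (not taken from the diff itself): the notify() call pattern the
# TestSendEmails cases above drive, assuming the same `website.notifications.emails`
# module that is mocked above; `commenter` and `node` are hypothetical. Per the
# assertions above, each subscribed transactional user is passed to
# store_emails([user_ids], 'email_transactional', 'comments', commenter, node, timestamp),
# and notify() returns the list of user ids it notified.
from django.utils import timezone

from website.notifications import emails

def fan_out_comment_event(commenter, node):
    # Resolve the node's 'comments' subscriptions (falling back to parent and global
    # settings when the node has none) and store or send an email for each subscriber.
    return emails.notify('comments', user=commenter, node=node, timestamp=timezone.now())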
- - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - node_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_comments', - node=node, - event_name='comments' - ) - node_subscription.save() - node_subscription.none.add(user) - node_subscription.save() - sent = emails.notify('comments', user=user, node=node, timestamp=timezone.now()) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'none') - time_now = timezone.now() - sent = emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_send_to_mentioned_users(self, mock_store): - user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.ProjectFactory(creator=user) - time_now = timezone.now() - emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert mock_store.called - mock_store.assert_called_with( - [node.creator._id], - 'email_transactional', - 'global_mentions', - user, - node, - time_now, - template=None, - new_mentions=[node.creator._id], - is_creator=(user == node.creator), - ) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_reply_event_if_comment_is_direct_reply(self, mock_store): - time_now = timezone.now() - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, target_user=self.project.creator) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comment_replies', - self.user, self.node, time_now, target_user=self.project.creator) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_reply_when_target_user_is_subscribed_via_user_settings(self, mock_store): - time_now = timezone.now() - emails.notify('global_comment_replies', user=self.project.creator, node=self.node, timestamp=time_now, target_user=self.user) - mock_store.assert_called_with([self.user._id], 'email_transactional', 'comment_replies', - self.project.creator, self.node, time_now, target_user=self.user) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply(self, mock_store): - user = factories.UserFactory() - time_now = timezone.now() - emails.notify('comments', user=user, node=self.node, timestamp=time_now, target_user=user) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, - self.node, time_now, target_user=user) - - @mock.patch('website.mails.send_mail') - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_comment_if_they_reply_to_their_own_comment(self, mock_store, 
mock_send_mail): - time_now = timezone.now() - emails.notify('comments', user=self.project.creator, node=self.project, timestamp=time_now, - target_user=self.project.creator) - assert not mock_store.called - assert not mock_send_mail.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply_on_component(self, mock_store): - # Test that comment replies on components that are not direct replies to the subscriber use the - # "comments" email template. - user = factories.UserFactory() - time_now = timezone.now() - emails.notify('comments', user, self.node, time_now, target_user=user) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, - self.node, time_now, target_user=user) - - def test_check_node_node_none(self): - subs = emails.check_node(None, 'comments') - assert subs == {'email_transactional': [], 'email_digest': [], 'none': []} - - def test_check_node_one(self): - subs = emails.check_node(self.project, 'comments') - assert subs == {'email_transactional': [self.project.creator._id], 'email_digest': [], 'none': []} - - @mock.patch('website.project.views.comment.notify') - def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify): - # user subscribed to comment replies - user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( - _id=user._id + '_comments', - user=user, - event_name='comment_replies' - ) - user_subscription.email_transactional.add(user) - user_subscription.save() - - # user is not subscribed to project comment notifications - project = factories.ProjectFactory() - - # user comments on project - target = factories.CommentFactory(node=project, user=user) - content = 'hammer to fall' - - # reply to user (note: notify is called from Comment.create) - reply = Comment.create( - auth=Auth(project.creator), - user=project.creator, - node=project, - content=content, - target=Guid.load(target._id), - root_target=Guid.load(project._id), - ) - assert mock_notify.called - assert mock_notify.call_count == 2 - - @mock.patch('website.project.views.comment.notify') - def test_check_user_comment_reply_only_calls_once(self, mock_notify): - # user subscribed to comment replies - user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( - _id=user._id + '_comments', - user=user, - event_name='comment_replies' - ) - user_subscription.email_transactional.add(user) - user_subscription.save() - - project = factories.ProjectFactory() - - # user comments on project - target = factories.CommentFactory(node=project, user=user) - content = 'P-Hacking: A user guide' - - mock_notify.return_value = [user._id] - # reply to user (note: notify is called from Comment.create) - reply = Comment.create( - auth=Auth(project.creator), - user=project.creator, - node=project, - content=content, - target=Guid.load(target._id), - root_target=Guid.load(project._id), - ) - assert mock_notify.called - assert mock_notify.call_count == 1 - - def test_get_settings_url_for_node(self): - url = emails.get_settings_url(self.project._id, self.user) - assert url == self.project.absolute_url + 'settings/' - - def test_get_settings_url_for_user(self): - url = emails.get_settings_url(self.user._id, self.user) - assert url == web_url_for('user_notifications', _absolute=True) - - def test_get_node_lineage(self): - node_lineage = emails.get_node_lineage(self.node) - assert node_lineage == 
[self.project._id, self.node._id] - - def test_fix_locale(self): - assert emails.fix_locale('en') == 'en' - assert emails.fix_locale('de_DE') == 'de_DE' - assert emails.fix_locale('de_de') == 'de_DE' - - def test_localize_timestamp(self): - timestamp = timezone.now() - self.user.timezone = 'America/New_York' - self.user.locale = 'en_US' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_empty_timezone(self): - timestamp = timezone.now() - self.user.timezone = '' - self.user.locale = 'en_US' - self.user.save() - tz = dates.get_timezone('Etc/UTC') - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_empty_locale(self): - timestamp = timezone.now() - self.user.timezone = 'America/New_York' - self.user.locale = '' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale('en') - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_handles_unicode(self): - timestamp = timezone.now() - self.user.timezone = 'Europe/Moscow' - self.user.locale = 'ru_RU' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - -class TestSendDigest(OsfTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.UserFactory() - self.user_2 = factories.UserFactory() - self.project = factories.ProjectFactory() - self.timestamp = timezone.now() - - def test_group_notifications_by_user_transactional(self): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - user=self.user_1, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d.save() - d2 = factories.NotificationDigestFactory( - user=self.user_2, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d2.save() - d3 = factories.NotificationDigestFactory( - user=self.user_2, - send_type='email_digest', - timestamp=self.timestamp, - message='Hello, but this should not appear (this is a digest)', - node_lineage=[self.project._id] - ) - d3.save() - user_groups = list(get_users_emails(send_type)) - expected = [ - { - 'user_id': self.user_1._id, - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': d._id - }] - }, - { - 'user_id': 
self.user_2._id, - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': d2._id - }] - } - ] - - assert len(user_groups) == 2 - assert user_groups == expected - digest_ids = [d._id, d2._id, d3._id] - remove_notifications(email_notification_ids=digest_ids) - - def test_group_notifications_by_user_digest(self): - send_type = 'email_digest' - d = factories.NotificationDigestFactory( - user=self.user_1, - send_type=send_type, - event='comment_replies', - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d.save() - d2 = factories.NotificationDigestFactory( - user=self.user_2, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d2.save() - d3 = factories.NotificationDigestFactory( - user=self.user_2, - send_type='email_transactional', - timestamp=self.timestamp, - message='Hello, but this should not appear (this is transactional)', - node_lineage=[self.project._id] - ) - d3.save() - user_groups = list(get_users_emails(send_type)) - expected = [ - { - 'user_id': str(self.user_1._id), - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': str(d._id) - }] - }, - { - 'user_id': str(self.user_2._id), - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': str(d2._id) - }] - } - ] - - assert len(user_groups) == 2 - assert user_groups == expected - digest_ids = [d._id, d2._id, d3._id] - remove_notifications(email_notification_ids=digest_ids) - - @mock.patch('website.mails.send_mail') - def test_send_users_email_called_with_correct_args(self, mock_send_mail): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - send_type=send_type, - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - d.save() - user_groups = list(get_users_emails(send_type)) - send_users_email(send_type) - assert mock_send_mail.called - assert mock_send_mail.call_count == len(user_groups) - - last_user_index = len(user_groups) - 1 - user = OSFUser.load(user_groups[last_user_index]['user_id']) - - args, kwargs = mock_send_mail.call_args - - assert kwargs['to_addr'] == user.username - assert kwargs['mail'] == mails.DIGEST - assert kwargs['name'] == user.fullname - assert kwargs['can_change_node_preferences'] == True - message = group_by_node(user_groups[last_user_index]['info']) - assert kwargs['message'] == message - - @mock.patch('website.mails.send_mail') - def test_send_users_email_ignores_disabled_users(self, mock_send_mail): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - send_type=send_type, - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - d.save() - - user_groups = list(get_users_emails(send_type)) - last_user_index = len(user_groups) - 1 - - user = OSFUser.load(user_groups[last_user_index]['user_id']) - user.is_disabled = True - user.save() - - send_users_email(send_type) - assert not mock_send_mail.called - - def test_remove_sent_digest_notifications(self): - d = factories.NotificationDigestFactory( - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - digest_id = d._id - remove_notifications(email_notification_ids=[digest_id]) - with pytest.raises(NotificationDigest.DoesNotExist): - NotificationDigest.objects.get(_id=digest_id) - -class 
TestNotificationsReviews(OsfTestCase): - def setUp(self): - super().setUp() - self.provider = factories.PreprintProviderFactory(_id='engrxiv') - self.preprint = factories.PreprintFactory(provider=self.provider) - self.user = factories.UserFactory() - self.sender = factories.UserFactory() - self.context_info = { - 'email_sender': self.sender, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - self.action = factories.ReviewActionFactory() - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_comments', - user=self.user, - event_name='global_comments' - ).add_user_to_subscription(self.user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_file_updated', - user=self.user, - event_name='global_file_updated' - ).add_user_to_subscription(self.user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_reviews', - user=self.user, - event_name='global_reviews' - ).add_user_to_subscription(self.user, 'email_transactional') - - def test_reviews_base_notification(self): - contributor_subscriptions = list(utils.get_all_user_subscriptions(self.user)) - event_types = [sub.event_name for sub in contributor_subscriptions] - assert 'global_reviews' in event_types - - @mock.patch('website.mails.mails.send_mail') - def test_reviews_submit_notification(self, mock_send_email): - listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user]) - assert mock_send_email.called - - @mock.patch('website.notifications.emails.notify_global_event') - def test_reviews_notification(self, mock_notify): - listeners.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako') - assert mock_notify.called - - -class QuerySetMatcher: - def __init__(self, some_obj): - self.some_obj = some_obj - - def __eq__(self, other): - return list(self.some_obj) == list(other) - - -class TestNotificationsReviewsModerator(OsfTestCase): - - def setUp(self): - super().setUp() - self.provider = factories.PreprintProviderFactory(_id='engrxiv') - self.preprint = factories.PreprintFactory(provider=self.provider) - self.submitter = factories.UserFactory() - self.moderator_transacitonal = factories.UserFactory() - self.moderator_digest= factories.UserFactory() - - self.context_info_submission = { - 'referrer': self.submitter, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - - self.context_info_request = { - 'requester': self.submitter, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - - self.action = factories.ReviewActionFactory() - self.subscription = NotificationSubscription.load(self.provider._id+'_new_pending_submissions') - self.subscription.add_user_to_subscription(self.moderator_transacitonal, 'email_transactional') - self.subscription.add_user_to_subscription(self.moderator_digest, 'email_digest') - - @mock.patch('website.notifications.emails.store_emails') - def test_reviews_submit_notification(self, mock_store): - time_now = timezone.now() - - preprint 
= self.context_info_submission['reviewable'] - provider = preprint.provider - - self.context_info_submission['message'] = f'submitted {preprint.title}.' - self.context_info_submission['profile_image_url'] = get_profile_image_url(self.context_info_submission['referrer']) - self.context_info_submission['reviews_submission_url'] = f'{settings.DOMAIN}reviews/preprints/{provider._id}/{preprint._id}' - listeners.reviews_submit_notification_moderators(self, time_now, self.context_info_submission) - subscription = NotificationSubscription.load(self.provider._id + '_new_pending_submissions') - digest_subscriber_ids = list(subscription.email_digest.all().values_list('guids___id', flat=True)) - instant_subscriber_ids = list(subscription.email_transactional.all().values_list('guids___id', flat=True)) - - mock_store.assert_any_call( - digest_subscriber_ids, - 'email_digest', - 'new_pending_submissions', - self.context_info_submission['referrer'], - self.context_info_submission['reviewable'], - time_now, - abstract_provider=self.context_info_submission['reviewable'].provider, - **self.context_info_submission - ) - - mock_store.assert_any_call( - instant_subscriber_ids, - 'email_transactional', - 'new_pending_submissions', - self.context_info_submission['referrer'], - self.context_info_submission['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_submission - ) - - @mock.patch('website.notifications.emails.store_emails') - def test_reviews_request_notification(self, mock_store): - time_now = timezone.now() - self.context_info_request['message'] = 'has requested withdrawal of {} "{}".'.format(self.context_info_request['reviewable'].provider.preprint_word, - self.context_info_request['reviewable'].title) - self.context_info_request['profile_image_url'] = get_profile_image_url(self.context_info_request['requester']) - self.context_info_request['reviews_submission_url'] = '{}reviews/preprints/{}/{}'.format(settings.DOMAIN, - self.context_info_request[ - 'reviewable'].provider._id, - self.context_info_request[ - 'reviewable']._id) - listeners.reviews_withdrawal_requests_notification(self, time_now, self.context_info_request) - subscription = NotificationSubscription.load(self.provider._id + '_new_pending_submissions') - digest_subscriber_ids = subscription.email_digest.all().values_list('guids___id', flat=True) - instant_subscriber_ids = subscription.email_transactional.all().values_list('guids___id', flat=True) - mock_store.assert_any_call(QuerySetMatcher(digest_subscriber_ids), - 'email_digest', - 'new_pending_submissions', - self.context_info_request['requester'], - self.context_info_request['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_request) - - mock_store.assert_any_call(QuerySetMatcher(instant_subscriber_ids), - 'email_transactional', - 'new_pending_submissions', - self.context_info_request['requester'], - self.context_info_request['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_request) diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 5528ef28219..13d44d362b5 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -53,6 +53,7 @@ update_or_enqueue_on_preprint_updated, should_update_preprint_identifiers ) +from conftest import start_mock_send_grid SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @@ -1984,6 +1985,8 @@ def 
test_update_or_enqueue_on_preprint_doi_created(self): assert should_update_preprint_identifiers(self.private_preprint, {}) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestPreprintConfirmationEmails(OsfTestCase): def setUp(self): super().setUp() @@ -1993,31 +1996,16 @@ def setUp(self): self.preprint = PreprintFactory(creator=self.user, project=self.project, provider=PreprintProviderFactory(_id='osf'), is_published=False) self.preprint.add_contributor(self.write_contrib, permissions=WRITE) self.preprint_branded = PreprintFactory(creator=self.user, is_published=False) + self.mock_send_grid = start_mock_send_grid(self) - @mock.patch('website.mails.send_mail') - def test_creator_gets_email(self, send_mail): + def test_creator_gets_email(self): self.preprint.set_published(True, auth=Auth(self.user), save=True) domain = self.preprint.provider.domain or settings.DOMAIN - send_mail.assert_called_with( - self.user.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - user=self.user, - provider_url=f'{domain}preprints/{self.preprint.provider._id}', - domain=domain, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - workflow=None, - reviewable=self.preprint, - is_creator=True, - provider_name=self.preprint.provider.name, - no_future_emails=[], - logo=settings.OSF_PREPRINTS_LOGO, - document_type=self.preprint.provider.preprint_word, - ) - assert send_mail.call_count == 1 + self.mock_send_grid.assert_called() + assert self.mock_send_grid.call_count == 1 self.preprint_branded.set_published(True, auth=Auth(self.user), save=True) - assert send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 class TestPreprintOsfStorage(OsfTestCase): diff --git a/tests/test_project_creation_view.py b/tests/test_project_creation_view.py index b2b0aeae788..da6fa8ac76a 100644 --- a/tests/test_project_creation_view.py +++ b/tests/test_project_creation_view.py @@ -7,7 +7,6 @@ from osf.utils import permissions from osf_tests.factories import ( AuthUserFactory, - OSFGroupFactory, ProjectFactory, ProjectWithAddonFactory, ) @@ -114,10 +113,8 @@ def test_create_component_with_contributors_read_write(self): url = web_url_for('project_new_node', pid=self.project._id) non_admin = AuthUserFactory() read_user = AuthUserFactory() - group = OSFGroupFactory(creator=read_user) self.project.add_contributor(non_admin, permissions=permissions.WRITE) self.project.add_contributor(read_user, permissions=permissions.READ) - self.project.add_osf_group(group, permissions.ADMIN) self.project.save() post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True} res = self.app.post(url, data=post_data, auth=non_admin.auth) @@ -136,8 +133,6 @@ def test_create_component_with_contributors_read_write(self): assert child.has_permission(read_user, permissions.ADMIN) is False assert child.has_permission(read_user, permissions.WRITE) is False assert child.has_permission(read_user, permissions.READ) is True - # User creating the component was not a manager on the group - assert group not in child.osf_groups # check redirect url assert '/contributors/' in res.location @@ -145,10 +140,8 @@ def test_group_copied_over_to_component_if_manager(self): url = web_url_for('project_new_node', pid=self.project._id) non_admin = AuthUserFactory() write_user = AuthUserFactory() - group = OSFGroupFactory(creator=write_user) self.project.add_contributor(non_admin, 
permissions=permissions.WRITE) self.project.add_contributor(write_user, permissions=permissions.WRITE) - self.project.add_osf_group(group, permissions.ADMIN) self.project.save() post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True} res = self.app.post(url, data=post_data, auth=write_user.auth) @@ -166,8 +159,6 @@ def test_group_copied_over_to_component_if_manager(self): assert child.has_permission(write_user, permissions.ADMIN) is True assert child.has_permission(write_user, permissions.WRITE) is True assert child.has_permission(write_user, permissions.READ) is True - # User creating the component was a manager of the group, so group copied - assert group in child.osf_groups # check redirect url assert '/contributors/' in res.location @@ -260,4 +251,3 @@ def test_project_new_from_template_contributor(self): url = api_url_for('project_new_from_template', nid=project._id) res = self.app.post(url, auth=contributor.auth) assert res.status_code == 201 - diff --git a/tests/test_registrations/base.py b/tests/test_registrations/base.py index 61e86cc767d..60d8b855808 100644 --- a/tests/test_registrations/base.py +++ b/tests/test_registrations/base.py @@ -9,7 +9,7 @@ from osf.models import RegistrationSchema from tests.base import OsfTestCase -from osf_tests.factories import AuthUserFactory, ProjectFactory, DraftRegistrationFactory, OSFGroupFactory +from osf_tests.factories import AuthUserFactory, ProjectFactory, DraftRegistrationFactory class RegistrationsTestBase(OsfTestCase): def setUp(self): @@ -26,9 +26,6 @@ def setUp(self): save=True ) self.non_contrib = AuthUserFactory() - self.group_mem = AuthUserFactory() - self.group = OSFGroupFactory(creator=self.group_mem) - self.node.add_osf_group(self.group, permissions.ADMIN) self.meta_schema = RegistrationSchema.objects.get(name='Open-Ended Registration', schema_version=2) diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index a05dc2bea18..4c310eecd79 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -29,6 +29,7 @@ from osf.models.sanctions import SanctionCallbackMixin, Embargo from osf.utils import permissions from osf.models import Registration, Contributor, OSFUser, SpamStatus +from conftest import start_mock_send_grid DUMMY_TOKEN = tokens.encode({ 'dummy': 'token' @@ -1059,6 +1060,8 @@ def test_GET_from_authorized_user_with_registration_rej_token_deleted_node(self) @pytest.mark.enable_bookmark_creation +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationEmbargoViewsTestCase(OsfTestCase): def setUp(self): super().setUp() @@ -1098,6 +1101,8 @@ def setUp(self): } }) + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_embargoed_registration_set_privacy_requests_embargo_termination(self, mock_ask): @@ -1131,8 +1136,7 @@ def test_cannot_request_termination_on_component_of_embargo(self): with pytest.raises(NodeStateError): reg._nodes.first().request_embargo_termination(node.creator) - @mock.patch('website.mails.send_mail') - def test_embargoed_registration_set_privacy_sends_mail(self, mock_send_mail): + def test_embargoed_registration_set_privacy_sends_mail(self): """ Integration test for https://github.com/CenterForOpenScience/osf.io/pull/5294#issuecomment-212613668 """ @@ -1156,7 +1160,7 @@ def 
test_embargoed_registration_set_privacy_sends_mail(self, mock_send_mail): if Contributor.objects.get(user_id=contributor.id, node_id=self.registration.id).permission == permissions.ADMIN: admin_contributors.append(contributor) for admin in admin_contributors: - assert any([each[0][0] == admin.username for each in mock_send_mail.call_args_list]) + assert any([each[1]['to_addr'] == admin.username for each in self.mock_send_grid.call_args_list]) @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_make_child_embargoed_registration_public_asks_all_admins_in_tree(self, mock_ask): diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 83b77f3a781..f2586b5cec6 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -15,7 +15,7 @@ from osf_tests.factories import ( AuthUserFactory, NodeFactory, ProjectFactory, RegistrationFactory, UserFactory, UnconfirmedUserFactory, - UnregUserFactory, OSFGroupFactory + UnregUserFactory ) from osf.utils import tokens from osf.exceptions import ( @@ -24,6 +24,7 @@ ) from osf.models import Contributor, Retraction from osf.utils import permissions +from conftest import start_mock_send_grid @@ -197,15 +198,6 @@ def test_non_admin_approval_token_raises_PermissionsError(self): assert self.registration.is_pending_retraction assert not self.registration.is_retracted - # group admin on node cannot retract registration - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - self.registration.registered_from.add_osf_group(group, permissions.ADMIN) - with pytest.raises(PermissionsError): - self.registration.retraction.approve_retraction(group_mem, approval_token) - assert self.registration.is_pending_retraction - assert not self.registration.is_retracted - def test_one_approval_with_one_admin_retracts(self): self.registration.retract_registration(self.user) self.registration.save() @@ -773,9 +765,7 @@ def setUp(self): self.retraction_get_url = self.registration.web_url_for('node_registration_retraction_get') self.justification = fake.sentence() - self.group_mem = AuthUserFactory() - self.group = OSFGroupFactory(creator=self.group_mem) - self.registration.registered_from.add_osf_group(self.group, permissions.ADMIN) + self.mock_send_grid = start_mock_send_grid(self) def test_GET_retraction_page_when_pending_retraction_returns_HTTPError_BAD_REQUEST(self): self.registration.retract_registration(self.user) @@ -800,8 +790,7 @@ def test_POST_retraction_to_private_registration_returns_HTTPError_FORBIDDEN(sel self.registration.reload() assert self.registration.retraction is None - @mock.patch('website.mails.send_mail') - def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_send_mail): + def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): unreg = UnregUserFactory() self.registration.add_unregistered_contributor( unreg.fullname, @@ -817,7 +806,7 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_s auth=self.user.auth, ) # Only the creator gets an email; the unreg user does not get emailed - assert mock_send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -865,12 +854,7 @@ def test_POST_retraction_by_non_admin_retract_HTTPError_UNAUTHORIZED(self): self.registration.reload() assert self.registration.retraction is None - # group admin 
POST fails - res = self.app.post(self.retraction_post_url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN - - @mock.patch('website.mails.send_mail') - def test_POST_retraction_without_justification_returns_HTTPOK(self, mock_send): + def test_POST_retraction_without_justification_returns_HTTPOK(self): res = self.app.post( self.retraction_post_url, json={'justification': ''}, @@ -882,8 +866,7 @@ def test_POST_retraction_without_justification_returns_HTTPOK(self, mock_send): assert self.registration.is_pending_retraction assert self.registration.retraction.justification is None - @mock.patch('website.mails.send_mail') - def test_valid_POST_retraction_adds_to_parent_projects_log(self, mock_send): + def test_valid_POST_retraction_adds_to_parent_projects_log(self): initial_project_logs = self.registration.registered_from.logs.count() self.app.post( self.retraction_post_url, @@ -894,8 +877,7 @@ def test_valid_POST_retraction_adds_to_parent_projects_log(self, mock_send): # Logs: Created, registered, retraction initiated assert self.registration.registered_from.logs.count() == initial_project_logs + 1 - @mock.patch('website.mails.send_mail') - def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_send): + def test_valid_POST_retraction_when_pending_retraction_raises_400(self): self.app.post( self.retraction_post_url, json={'justification': ''}, @@ -908,16 +890,13 @@ def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_sen ) assert res.status_code == 400 - @mock.patch('website.mails.send_mail') - def test_valid_POST_calls_send_mail_with_username(self, mock_send): + def test_valid_POST_calls_send_mail_with_username(self): self.app.post( self.retraction_post_url, json={'justification': ''}, auth=self.user.auth, ) - assert mock_send.called - args, kwargs = mock_send.call_args - assert self.user.username in args + assert self.mock_send_grid.called def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): non_contributor = AuthUserFactory() @@ -930,10 +909,6 @@ def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): assert self.registration.is_pending_retraction assert not self.registration.is_retracted - # group admin on node fails disapproval GET - res = self.app.get(approval_url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN - def test_non_contributor_GET_disapproval_returns_HTTPError_FORBIDDEN(self): non_contributor = AuthUserFactory() self.registration.retract_registration(self.user) @@ -944,7 +919,3 @@ def test_non_contributor_GET_disapproval_returns_HTTPError_FORBIDDEN(self): assert res.status_code == http_status.HTTP_403_FORBIDDEN assert self.registration.is_pending_retraction assert not self.registration.is_retracted - - # group admin on node fails disapproval GET - res = self.app.get(disapproval_url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN diff --git a/tests/test_registrations/test_views.py b/tests/test_registrations/test_views.py index 34c7577540e..034b7b31ae4 100644 --- a/tests/test_registrations/test_views.py +++ b/tests/test_registrations/test_views.py @@ -343,10 +343,6 @@ def test_update_draft_registration_non_admin(self): res = self.app.put(url, json=payload, auth=self.non_admin.auth) assert res.status_code == http_status.HTTP_403_FORBIDDEN - # group admin cannot update draft registration - res = self.app.put(url, json=payload, auth=self.group_mem.auth) - assert res.status_code == 
http_status.HTTP_403_FORBIDDEN - def test_delete_draft_registration(self): assert 1 == DraftRegistration.objects.filter(deleted__isnull=True).count() url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id) @@ -363,10 +359,6 @@ def test_delete_draft_registration_non_admin(self): assert res.status_code == http_status.HTTP_403_FORBIDDEN assert 1 == DraftRegistration.objects.filter(deleted__isnull=True).count() - # group admin cannot delete draft registration - res = self.app.delete(url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN - @mock.patch('website.archiver.tasks.archive') def test_delete_draft_registration_registered(self, mock_register_draft): self.draft.register(auth=self.auth, save=True) diff --git a/tests/test_serializers.py b/tests/test_serializers.py index f86e983cbd3..08f92f7d232 100644 --- a/tests/test_serializers.py +++ b/tests/test_serializers.py @@ -7,7 +7,6 @@ UserFactory, RegistrationFactory, NodeFactory, - OSFGroupFactory, CollectionFactory, ) from osf.models import NodeRelation @@ -187,23 +186,6 @@ def test_serialize_node_summary_child_exists(self): result = _view_project(parent_node, Auth(user)) assert result['node']['child_exists'] == True - def test_serialize_node_summary_is_contributor_osf_group(self): - project = ProjectFactory() - user = UserFactory() - group = OSFGroupFactory(creator=user) - project.add_osf_group(group, permissions.WRITE) - - res = _view_project( - project, auth=Auth(user), - ) - assert not res['user']['is_contributor'] - assert res['user']['is_contributor_or_group_member'] - assert not res['user']['is_admin'] - assert res['user']['can_edit'] - assert res['user']['has_read_permissions'] - assert set(res['user']['permissions']) == {permissions.READ, permissions.WRITE} - assert res['user']['can_comment'] - def test_serialize_node_search_returns_only_visible_contributors(self): node = NodeFactory() non_visible_contributor = UserFactory() diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index a97bc288e44..0713d0b4c54 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -15,8 +15,8 @@ @pytest.mark.django_db -@mock.patch('framework.auth.views.mails.send_mail') -def test_throttled_autoban(mock_mail): +@pytest.mark.usefixtures('mock_send_grid') +def test_throttled_autoban(mock_send_grid): settings.SPAM_THROTTLE_AUTOBAN = True user = AuthUserFactory() projects = [] @@ -25,11 +25,7 @@ def test_throttled_autoban(mock_mail): proj.flag_spam() proj.save() projects.append(proj) - mock_mail.assert_called_with(osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - to_addr=user.username, - user=user, - mail=mails.SPAM_USER_BANNED) + mock_send_grid.assert_called() user.reload() assert user.is_disabled for project in projects: diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 3e1c455c078..bb801340423 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -1,103 +1,32 @@ #!/usr/bin/env python3 """Views tests for the OSF.""" -from unittest.mock import MagicMock, ANY -from urllib import parse - -import datetime as dt -import time -import unittest from hashlib import md5 -from http.cookies import SimpleCookie from unittest import mock -from urllib.parse import quote_plus import pytest -from django.core.exceptions import ValidationError -from django.utils import timezone -from flask import request, g -from lxml import html -from pytest import approx from rest_framework import status 
as http_status from addons.github.tests.factories import GitHubAccountFactory -from addons.osfstorage import settings as osfstorage_settings -from addons.wiki.models import WikiPage -from framework import auth -from framework.auth import Auth, authenticate, cas, core -from framework.auth.campaigns import ( - get_campaigns, - is_institution_login, - is_native_login, - is_proxy_login, - campaign_url_for -) -from framework.auth.exceptions import InvalidTokenError -from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness -from framework.auth.views import login_and_register_handler from framework.celery_tasks import handlers -from framework.exceptions import HTTPError, TemplateHTTPError -from framework.flask import redirect -from framework.transactions.handlers import no_auto_transaction from osf.external.spam import tasks as spam_tasks from osf.models import ( - Comment, - AbstractNode, - OSFUser, - Tag, - SpamStatus, - NodeRelation, NotableDomain ) -from osf.utils import permissions from osf_tests.factories import ( fake_email, ApiOAuth2ApplicationFactory, ApiOAuth2PersonalTokenFactory, AuthUserFactory, - CollectionFactory, - CommentFactory, - NodeFactory, - OSFGroupFactory, - PreprintFactory, - PreprintProviderFactory, - PrivateLinkFactory, - ProjectFactory, - ProjectWithAddonFactory, - RegistrationProviderFactory, - UserFactory, - UnconfirmedUserFactory, - UnregUserFactory, RegionFactory, - DraftRegistrationFactory, ) from tests.base import ( - assert_is_redirect, - capture_signals, fake, - get_default_metaschema, OsfTestCase, - assert_datetime_equal, - test_app ) -from tests.test_cas_authentication import generate_external_user_with_resp -from tests.utils import run_celery_tasks -from website import mailchimp_utils, mails, settings, language -from website.profile.utils import add_contributor_json, serialize_unregistered -from website.profile.views import update_osf_help_mails_subscription -from website.project.decorators import check_can_access -from website.project.model import has_anonymous_link -from website.project.signals import contributor_added -from website.project.views.contributor import ( - deserialize_contributors, - notify_added_contributor, - send_claim_email, - send_claim_registered_email, -) -from website.project.views.node import _should_show_wiki_widget, abbrev_authors +from website import mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for, web_url_for -from website.util import rubeus -from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag +from conftest import start_mock_send_grid @pytest.mark.enable_enqueue_task @@ -413,8 +342,7 @@ def test_cannot_update_user_without_user_id(self): assert res.status_code == 400 assert res.json['message_long'] == '"id" is required' - @mock.patch('framework.auth.views.mails.send_mail') - def test_add_emails_return_emails(self, send_mail): + def test_add_emails_return_emails(self): user1 = AuthUserFactory() url = api_url_for('update_user') email = 'test@cos.io' @@ -427,8 +355,7 @@ def test_add_emails_return_emails(self, send_mail): assert 'emails' in res.json['profile'] assert len(res.json['profile']['emails']) == 2 - @mock.patch('framework.auth.views.mails.send_mail') - def test_resend_confirmation_return_emails(self, send_mail): + def test_resend_confirmation_return_emails(self): user1 = AuthUserFactory() url = api_url_for('resend_confirmation') email = 'test@cos.io' @@ -440,9 +367,8 @@ def 
test_resend_confirmation_return_emails(self, send_mail): assert 'emails' in res.json['profile'] assert len(res.json['profile']['emails']) == 2 - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('website.mailchimp_utils.get_mailchimp_api') - def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail): + def test_update_user_mailing_lists(self, mock_get_mailchimp_api): email = fake_email() email_hash = md5(email.lower().encode()).hexdigest() self.user.emails.create(address=email) @@ -485,9 +411,8 @@ def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail): ) handlers.celery_teardown_request() - @mock.patch('framework.auth.views.mails.send_mail') @mock.patch('website.mailchimp_utils.get_mailchimp_api') - def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api, send_mail): + def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api): email = fake_email() self.user.emails.create(address=email) list_name = MAILCHIMP_GENERAL_LIST @@ -589,6 +514,8 @@ def setUp(self): self.user.auth = (self.user.username, 'password') self.user.save() + self.mock_send_grid = start_mock_send_grid(self) + def test_password_change_valid(self, old_password='password', new_password='Pa$$w0rd', @@ -793,14 +720,15 @@ def test_password_change_invalid_empty_string_confirm_password(self): def test_password_change_invalid_blank_confirm_password(self): self.test_password_change_invalid_blank_password('password', 'new password', ' ') - @mock.patch('framework.auth.views.mails.send_mail') - def test_user_cannot_request_account_export_before_throttle_expires(self, send_mail): + @mock.patch('website.mails.settings.USE_EMAIL', True) + @mock.patch('website.mails.settings.USE_CELERY', False) + def test_user_cannot_request_account_export_before_throttle_expires(self): url = api_url_for('request_export') self.app.post(url, auth=self.user.auth) - assert send_mail.called + assert self.mock_send_grid.called res = self.app.post(url, auth=self.user.auth) assert res.status_code == 400 - assert send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 def test_get_unconfirmed_emails_exclude_external_identity(self): external_identity = { diff --git a/tests/test_webtests.py b/tests/test_webtests.py index e06be14a093..ae1a30e7618 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -36,6 +36,7 @@ from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory from website import language from website.util import web_url_for, api_url_for +from conftest import start_mock_send_grid logging.getLogger('website.project.model').setLevel(logging.ERROR) @@ -714,6 +715,8 @@ def test_claim_user_registered_preprint_with_correct_password(self): assert preprint not in unreg_user.unclaimed_records +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestResendConfirmation(OsfTestCase): def setUp(self): @@ -723,6 +726,8 @@ def setUp(self): self.get_url = web_url_for('resend_confirmation_get') self.post_url = web_url_for('resend_confirmation_post') + self.mock_send_grid = start_mock_send_grid(self) + # test that resend confirmation page is load correctly def test_resend_confirmation_get(self): res = self.app.get(self.get_url) @@ -731,8 +736,7 @@ def test_resend_confirmation_get(self): assert res.get_form('resendForm') # test that unconfirmed user can receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') - def 
test_can_receive_resend_confirmation_email(self, mock_send_mail): + def test_can_receive_resend_confirmation_email(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -740,14 +744,13 @@ def test_can_receive_resend_confirmation_email(self, mock_send_mail): res = form.submit(self.app) # check email, request and response - assert mock_send_mail.called + assert self.mock_send_grid.called assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', res.text) # test that confirmed user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): + def test_cannot_receive_resend_confirmation_email_1(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -755,14 +758,13 @@ def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): res = form.submit(self.app) # check email, request and response - assert not mock_send_mail.called + assert not self.mock_send_grid.called assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('has already been confirmed', res.text) # test that non-existing user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): + def test_cannot_receive_resend_confirmation_email_2(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -770,14 +772,13 @@ def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): res = form.submit(self.app) # check email, request and response - assert not mock_send_mail.called + assert not self.mock_send_grid.called assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', res.text) # test that user cannot submit resend confirmation request too quickly - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail): + def test_cannot_resend_confirmation_twice_quickly(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -790,6 +791,8 @@ def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail): assert_in_html('Please wait', res.text) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestForgotPassword(OsfTestCase): def setUp(self): @@ -801,6 +804,8 @@ def setUp(self): self.user.verification_key_v2 = {} self.user.save() + self.mock_send_grid = start_mock_send_grid(self) + # log users out before they land on forgot password page def test_forgot_password_logs_out_user(self): # visit forgot password link while another user is logged in @@ -820,8 +825,7 @@ def test_get_forgot_password(self): assert res.get_form('forgotPasswordForm') # test that existing user can receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_can_receive_reset_password_email(self, mock_send_mail): + def test_can_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -829,7 +833,7 @@ def 
test_can_receive_reset_password_email(self, mock_send_mail): res = form.submit(self.app) # check mail was sent - assert mock_send_mail.called + assert self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -843,8 +847,7 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_reset_password_email(self, mock_send_mail): + def test_cannot_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -852,7 +855,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): res = form.submit(self.app) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -866,8 +869,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_not_active_user_no_reset_password_email(self, mock_send_mail): + def test_not_active_user_no_reset_password_email(self): self.user.deactivate_account() self.user.save() @@ -878,7 +880,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): res = form.submit(self.app) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -892,8 +894,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_reset_password_twice_quickly(self, mock_send_mail): + def test_cannot_reset_password_twice_quickly(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -908,6 +909,8 @@ def test_cannot_reset_password_twice_quickly(self, mock_send_mail): assert_not_in_html('If there is an OSF account', res.text) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestForgotPasswordInstitution(OsfTestCase): def setUp(self): @@ -919,6 +922,8 @@ def setUp(self): self.user.verification_key_v2 = {} self.user.save() + self.mock_send_grid = start_mock_send_grid(self) + # log users out before they land on institutional forgot password page def test_forgot_password_logs_out_user(self): # TODO: check in qa url encoding @@ -939,13 +944,12 @@ def test_get_forgot_password(self): assert 'campaign=unsupportedinstitution' in location # test that user from disabled institution can receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_can_receive_reset_password_email(self, mock_send_mail): + def test_can_receive_reset_password_email(self): # submit email to institutional forgot-password page res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was sent - assert mock_send_mail.called + assert self.mock_send_grid.called # check http 200 
response assert res.status_code == 200 # check request URL is /forgotpassword @@ -959,13 +963,12 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_receive_reset_password_email(self, mock_send_mail): + def test_cannot_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword-institution @@ -979,15 +982,14 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive institutional reset password email - @mock.patch('framework.auth.views.mails.send_mail') - def test_not_active_user_no_reset_password_email(self, mock_send_mail): + def test_not_active_user_no_reset_password_email(self): self.user.deactivate_account() self.user.save() res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword-institution @@ -1001,8 +1003,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.send_mail') - def test_cannot_reset_password_twice_quickly(self, mock_send_mail): + def test_cannot_reset_password_twice_quickly(self): # submit institutional forgot-password request in rapid succession res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) diff --git a/website/conferences/signals.py b/website/conferences/signals.py deleted file mode 100644 index ef4459bcf16..00000000000 --- a/website/conferences/signals.py +++ /dev/null @@ -1,5 +0,0 @@ -import blinker - -signals = blinker.Namespace() - -osf4m_user_created = signals.signal('osf4m-user-created') diff --git a/website/conferences/views.py b/website/conferences/views.py index 4f3e7cd79ee..cf7dbfd6d3b 100644 --- a/website/conferences/views.py +++ b/website/conferences/views.py @@ -13,7 +13,7 @@ from osf import features from osf.models import AbstractNode, Node, Conference, OSFUser from website import settings -from website.conferences import utils, signals +from website.conferences import utils from website.conferences.message import ConferenceMessage, ConferenceError from website.ember_osf_web.decorators import ember_flag_is_active from website.mails import CONFERENCE_SUBMITTED, CONFERENCE_INACTIVE, CONFERENCE_FAILED, CONFERENCE_DEPRECATION @@ -154,8 +154,6 @@ def add_poster_by_email(conference, message): can_change_preferences=False, logo=settings.OSF_MEETINGS_LOGO ) - if user_created: - signals.osf4m_user_created.send(user, conference=conference, node=node) def conference_data(meeting): try: diff --git a/website/ember_osf_web/views.py b/website/ember_osf_web/views.py index ce8e1978a89..84f23ad8327 100644 --- a/website/ember_osf_web/views.py +++ 
b/website/ember_osf_web/views.py @@ -8,8 +8,6 @@ ember_osf_web_dir = os.path.abspath(os.path.join(os.getcwd(), EXTERNAL_EMBER_APPS['ember_osf_web']['path'])) routes = [ - '/quickfiles/', - '//quickfiles/', '/institutions/', ] diff --git a/website/mails/listeners.py b/website/mails/listeners.py index 8304559d9ba..3f411d52f87 100644 --- a/website/mails/listeners.py +++ b/website/mails/listeners.py @@ -7,7 +7,6 @@ from website import settings from framework.auth import signals as auth_signals from website.project import signals as project_signals -from website.conferences import signals as conference_signals @auth_signals.unconfirmed_user_created.connect @@ -43,21 +42,3 @@ def queue_first_public_project_email(user, node, meeting_creation): project_title=node.title, osf_support_email=settings.OSF_SUPPORT_EMAIL, ) - -@conference_signals.osf4m_user_created.connect -def queue_osf4m_welcome_email(user, conference, node): - """Queue an email once a new user is created for OSF Meetings""" - from osf.models.queued_mail import queue_mail, WELCOME_OSF4M - root = (node.get_addon('osfstorage')).get_root() - root_children = [child for child in root.children if child.is_file] - queue_mail( - to_addr=user.username, - mail=WELCOME_OSF4M, - send_at=timezone.now() + settings.WELCOME_OSF4M_WAIT_TIME, - user=user, - conference=conference.name, - fullname=user.fullname, - fid=root_children[0]._id if len(root_children) else None, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - domain=settings.DOMAIN, - ) diff --git a/website/mails/mails.py b/website/mails/mails.py index 61c466fdfb8..b98b7c37b87 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -26,6 +26,7 @@ from framework.email import tasks from osf import features from website import settings +from django.core.mail import EmailMessage, get_connection logger = logging.getLogger(__name__) @@ -75,6 +76,34 @@ def render_message(tpl_name, **context): return tpl.render(**context) +def send_to_mailhog(subject, message, from_email, to_email, attachment_name=None, attachment_content=None): + email = EmailMessage( + subject=subject, + body=message, + from_email=from_email, + to=[to_email], + connection=get_connection( + backend='django.core.mail.backends.smtp.EmailBackend', + host=settings.MAILHOG_HOST, + port=settings.MAILHOG_PORT, + username='', + password='', + use_tls=False, + use_ssl=False, + ) + ) + email.content_subtype = 'html' + + if attachment_name and attachment_content: + email.attach(attachment_name, attachment_content) + + try: + email.send() + except ConnectionRefusedError: + logger.debug('Mailhog is not running. 
Please start it to send emails.') + return + + def send_mail( to_addr, mail, @@ -119,6 +148,17 @@ def send_mail( logger.debug('Sending email...') logger.debug(f'To: {to_addr}\nFrom: {from_addr}\nSubject: {subject}\nMessage: {message}') + if waffle.switch_is_active(features.ENABLE_MAILHOG): + logger.debug('Intercepting email: sending via MailHog') + send_to_mailhog( + subject=subject, + message=message, + from_email=from_addr, + to_email=to_addr, + attachment_name=attachment_name, + attachment_content=attachment_content + ) + kwargs = dict( from_addr=from_addr, to_addr=to_addr, @@ -511,21 +551,6 @@ def get_english_article(word): subject='Your ${document_type} has been withdrawn', ) -GROUP_MEMBER_ADDED = Mail( - 'group_member_added', - subject='You have been added as a ${permission} of the group ${group_name}', -) - -GROUP_MEMBER_UNREGISTERED_ADDED = Mail( - 'group_member_unregistered_added', - subject='You have been added as a ${permission} of the group ${group_name}', -) - -GROUP_ADDED_TO_NODE = Mail( - 'group_added_to_node', - subject='Your group, ${group_name}, has been added to an OSF Project' -) - WITHDRAWAL_REQUEST_DECLINED = Mail( 'withdrawal_request_declined', subject='Your withdrawal request has been declined', @@ -599,11 +624,6 @@ def get_english_article(word): subject='The updates for ${resource_type} ${title} were not accepted' ) -QUICKFILES_MIGRATED = Mail( - 'quickfiles_migrated', - subject='Your Quick Files have moved' -) - ADDONS_BOA_JOB_COMPLETE = Mail( 'addons_boa_job_complete', subject='Your Boa job has completed' diff --git a/website/maintenance.py b/website/maintenance.py index 98359540cfb..2424651d758 100644 --- a/website/maintenance.py +++ b/website/maintenance.py @@ -42,11 +42,19 @@ def set_maintenance(message, level=1, start=None, end=None): return {'start': state.start, 'end': state.end} + +class InFailedSqlTransaction: + pass + + def get_maintenance(): """Get the current start and end times for the maintenance state. Return None if there is no current maintenance state. """ - maintenance = MaintenanceState.objects.all().first() + try: + maintenance = MaintenanceState.objects.all().first() + except InFailedSqlTransaction: + return None return MaintenanceStateSerializer(maintenance).data if maintenance else None def unset_maintenance(): diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 4068367c505..ce3c9db4315 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,5 +1,4 @@ NODE_SUBSCRIPTIONS_AVAILABLE = { - 'comments': 'Comments added', 'file_updated': 'Files updated' } @@ -7,10 +6,7 @@ # subscription. If no notification type has been assigned, the user subscription # will default to 'email_transactional'. 
USER_SUBSCRIPTIONS_AVAILABLE = { - 'global_comment_replies': 'Replies to your comments', - 'global_comments': 'Comments added', 'global_file_updated': 'Files updated', - 'global_mentions': 'Mentions added', 'global_reviews': 'Preprint submissions updated' } diff --git a/website/notifications/emails.py b/website/notifications/emails.py index d26d43351d5..56f513920af 100644 --- a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -2,7 +2,8 @@ from babel import dates, core, Locale -from osf.models import AbstractNode, NotificationDigest, NotificationSubscription +from osf.models import AbstractNode, NotificationSubscriptionLegacy +from osf.models.notifications import NotificationDigest from osf.utils.permissions import ADMIN, READ from website import mails from website.notifications import constants @@ -159,7 +160,7 @@ def check_node(node, event): """Return subscription for a particular node and event.""" node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} if node: - subscription = NotificationSubscription.load(utils.to_subscription_key(node._id, event)) + subscription = NotificationSubscriptionLegacy.load(utils.to_subscription_key(node._id, event)) for notification_type in node_subscriptions: users = getattr(subscription, notification_type, []) if users: @@ -172,7 +173,7 @@ def check_node(node, event): def get_user_subscriptions(user, event): if user.is_disabled: return {} - user_subscription = NotificationSubscription.load(utils.to_subscription_key(user._id, event)) + user_subscription = NotificationSubscriptionLegacy.load(utils.to_subscription_key(user._id, event)) if user_subscription: return {key: list(getattr(user_subscription, key).all().values_list('guids___id', flat=True)) for key in constants.NOTIFICATION_TYPES} else: diff --git a/website/notifications/utils.py b/website/notifications/utils.py index af8275ab5fb..51d487ff67a 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -91,10 +91,10 @@ def remove_supplemental_node(node): @app.task(max_retries=5, default_retry_delay=60) def remove_subscription_task(node_id): AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') node = AbstractNode.load(node_id) - NotificationSubscription.objects.filter(node=node).delete() + NotificationSubscriptionLegacy.objects.filter(node=node).delete() parent = node.parent_node if parent and parent.child_node_subscriptions: @@ -144,12 +144,12 @@ def users_to_remove(source_event, source_node, new_node): :param new_node: Node instance where a sub or new sub will be. 
:return: Dict of notification type lists with user_ids """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') removed_users = {key: [] for key in constants.NOTIFICATION_TYPES} if source_node == new_node: return removed_users - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) - old_node_sub = NotificationSubscription.load(to_subscription_key(source_node._id, + old_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, source_event)) + old_node_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, '_'.join(source_event.split('_')[-2:]))) if not old_sub and not old_node_sub: return removed_users @@ -172,11 +172,11 @@ def move_subscription(remove_users, source_event, source_node, new_event, new_no :param new_node: Instance of Node :return: Returns a NOTIFICATION_TYPES list of removed users without permissions """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') OSFUser = apps.get_model('osf.OSFUser') if source_node == new_node: return - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) + old_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, source_event)) if not old_sub: return elif old_sub: @@ -205,7 +205,6 @@ def get_configured_projects(user): configured_projects = set() user_subscriptions = get_all_user_subscriptions(user, extra=( ~Q(node__type='osf.collection') & - ~Q(node__type='osf.quickfilesnode') & Q(node__is_deleted=False) )) @@ -237,8 +236,8 @@ def check_project_subscriptions_are_all_none(user, node): def get_all_user_subscriptions(user, extra=None): """ Get all Subscription objects that the user is subscribed to""" - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - queryset = NotificationSubscription.objects.filter( + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') + queryset = NotificationSubscriptionLegacy.objects.filter( Q(none=user.pk) | Q(email_digest=user.pk) | Q(email_transactional=user.pk) @@ -392,14 +391,14 @@ def get_parent_notification_type(node, event, user): :return: str notification type (e.g. 'email_transactional') """ AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') if node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, READ): parent = node.parent_node key = to_subscription_key(parent._id, event) try: - subscription = NotificationSubscription.objects.get(_id=key) - except NotificationSubscription.DoesNotExist: + subscription = NotificationSubscriptionLegacy.objects.get(_id=key) + except NotificationSubscriptionLegacy.DoesNotExist: return get_parent_notification_type(parent, event, user) for notification_type in constants.NOTIFICATION_TYPES: @@ -429,19 +428,19 @@ def check_if_all_global_subscriptions_are_none(user): # This function predates comment mentions, which is a global_ notification that cannot be disabled # Therefore, an actual check would never return True. 
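To make the `NotificationSubscription` to `NotificationSubscriptionLegacy` rename in `emails.py` and `utils.py` easier to follow, here is a minimal sketch of the lookup pattern these hunks repeat: resolve the legacy model lazily through the app registry, load it by the subscription key, and bucket subscribers per notification type. The `'<guid>_<event>'` key format and the three bucket names are inferred from call sites in this diff; the wrapper function itself is illustrative, not part of the change.

```python
# Illustrative sketch of the lookup pattern used after the rename.
# Field names (email_transactional, email_digest, none) and the guids___id
# values_list come from the hunks in this diff; the key format mirrors
# to_subscription_key()'s call sites.
from django.apps import apps

NOTIFICATION_TYPES = ('email_transactional', 'email_digest', 'none')

def subscribers_by_type(target_id, event):
    NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy')
    subscription = NotificationSubscriptionLegacy.load(f'{target_id}_{event}')
    buckets = {key: [] for key in NOTIFICATION_TYPES}
    if subscription is None:
        return buckets
    for notification_type in buckets:
        # Each notification type is an m2m of users on the legacy subscription model.
        users = getattr(subscription, notification_type, None)
        if users is not None:
            buckets[notification_type] = list(users.all().values_list('guids___id', flat=True))
    return buckets
```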
# If this changes, an optimized query would look something like: - # not NotificationSubscription.objects.filter(Q(event_name__startswith='global_') & (Q(email_digest=user.pk)|Q(email_transactional=user.pk))).exists() + # not NotificationSubscriptionLegacy.objects.filter(Q(event_name__startswith='global_') & (Q(email_digest=user.pk)|Q(email_transactional=user.pk))).exists() return False def subscribe_user_to_global_notifications(user): - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') notification_type = 'email_transactional' user_events = constants.USER_SUBSCRIPTIONS_AVAILABLE for user_event in user_events: user_event_id = to_subscription_key(user._id, user_event) # get_or_create saves on creation - subscription, created = NotificationSubscription.objects.get_or_create(_id=user_event_id, user=user, event_name=user_event) + subscription, created = NotificationSubscriptionLegacy.objects.get_or_create(_id=user_event_id, user=user, event_name=user_event) subscription.add_user_to_subscription(user, notification_type) subscription.save() @@ -450,7 +449,7 @@ def subscribe_user_to_notifications(node, user): """ Update the notification settings for the creator or contributors :param user: User to subscribe to notifications """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') Preprint = apps.get_model('osf.Preprint') DraftRegistration = apps.get_model('osf.DraftRegistration') if isinstance(node, Preprint): @@ -476,16 +475,16 @@ def subscribe_user_to_notifications(node, user): for event in events: event_id = to_subscription_key(target_id, event) global_event_id = to_subscription_key(user._id, 'global_' + event) - global_subscription = NotificationSubscription.load(global_event_id) + global_subscription = NotificationSubscriptionLegacy.load(global_event_id) - subscription = NotificationSubscription.load(event_id) + subscription = NotificationSubscriptionLegacy.load(event_id) # If no subscription for component and creator is the user, do not create subscription # If no subscription exists for the component, this means that it should adopt its # parent's settings if not (node and node.parent_node and not subscription and node.creator == user): if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=node, event_name=event) + subscription = NotificationSubscriptionLegacy(_id=event_id, owner=node, event_name=event) # Need to save here in order to access m2m fields subscription.save() if global_subscription: diff --git a/website/notifications/views.py b/website/notifications/views.py index 8ca4775367d..1cbb62ee08d 100644 --- a/website/notifications/views.py +++ b/website/notifications/views.py @@ -6,7 +6,8 @@ from framework.auth.decorators import must_be_logged_in from framework.exceptions import HTTPError -from osf.models import AbstractNode, NotificationSubscription, Registration +from osf.models import AbstractNode, Registration +from osf.models.notifications import NotificationSubscriptionLegacy from osf.utils.permissions import READ from website.notifications import utils from website.notifications.constants import NOTIFICATION_TYPES @@ -95,17 +96,17 @@ def configure_subscription(auth): raise HTTPError(http_status.HTTP_400_BAD_REQUEST) # If adopt_parent make sure that this subscription is None for the current User - subscription = 
NotificationSubscription.load(event_id) + subscription = NotificationSubscriptionLegacy.load(event_id) if not subscription: return {} # We're done here subscription.remove_user_from_subscription(user) return {} - subscription = NotificationSubscription.load(event_id) + subscription = NotificationSubscriptionLegacy.load(event_id) if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=owner, event_name=event) + subscription = NotificationSubscriptionLegacy(_id=event_id, owner=owner, event_name=event) subscription.save() if node and node._id not in user.notifications_configured: diff --git a/website/osf_groups/__init__.py b/website/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/website/osf_groups/signals.py b/website/osf_groups/signals.py deleted file mode 100644 index 6edd7aa3603..00000000000 --- a/website/osf_groups/signals.py +++ /dev/null @@ -1,7 +0,0 @@ -import blinker - -signals = blinker.Namespace() - -member_added = signals.signal('member-added') -unreg_member_added = signals.signal('unreg-member-added') -group_added_to_node = signals.signal('group-added') diff --git a/website/osf_groups/views.py b/website/osf_groups/views.py deleted file mode 100644 index b8b9d6aa638..00000000000 --- a/website/osf_groups/views.py +++ /dev/null @@ -1,135 +0,0 @@ -import logging - -from framework.utils import get_timestamp, throttle_period_expired - -from website import mails, settings -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications.utils import ( - check_if_all_global_subscriptions_are_none, - subscribe_user_to_notifications, -) -from website.osf_groups.signals import ( - unreg_member_added, - member_added, - group_added_to_node, -) -logger = logging.getLogger(__name__) - - -@member_added.connect -def notify_added_group_member(group, user, permission, auth=None, throttle=None, email_template='default', *args, **kwargs): - if email_template == 'false': - return - - throttle = throttle or settings.GROUP_MEMBER_ADDED_EMAIL_THROTTLE - - member_record = user.member_added_email_records.get(group._id, {}) - if member_record: - timestamp = member_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return - else: - user.member_added_email_records[group._id] = {} - - if user.is_registered: - email_template = mails.GROUP_MEMBER_ADDED - mails.send_mail( - to_addr=user.username, - mail=email_template, - user=user, - group_name=group.name, - permission=permission, - referrer_name=auth.user.fullname if auth else '', - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - user.member_added_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - - else: - unreg_member_added.send(group, user=user, permission=permission, auth=auth, throttle=throttle, email_template=email_template) - - -def send_claim_member_email(email, user, group, permission, auth=None, throttle=None, email_template='default'): - """ - Unregistered user claiming a user account as a group member of an OSFGroup. Send an email for claiming the account. - Sends to the given email - - :param str email: The address given in the claim user form - :param User user: The User record to claim. - :param OSFGroup group: The group where the user claimed their account. 
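After `global_comments`, `global_comment_replies`, and `global_mentions` are dropped from `USER_SUBSCRIPTIONS_AVAILABLE`, `subscribe_user_to_global_notifications()` above only has two events left to create. A consolidated, illustrative sketch of that flow; the model and method names come from the hunks above, while the wrapper function itself is not part of the diff:

```python
# Illustrative wrapper around the pattern in subscribe_user_to_global_notifications().
# GLOBAL_EVENTS lists the two events remaining in USER_SUBSCRIPTIONS_AVAILABLE after this diff.
from django.apps import apps

GLOBAL_EVENTS = ('global_file_updated', 'global_reviews')

def ensure_global_subscriptions(user, notification_type='email_transactional'):
    NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy')
    for event in GLOBAL_EVENTS:
        subscription, _created = NotificationSubscriptionLegacy.objects.get_or_create(
            _id=f'{user._id}_{event}',  # same key to_subscription_key(user._id, event) builds
            user=user,
            event_name=event,
        )
        subscription.add_user_to_subscription(user, notification_type)
        subscription.save()
```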
- :return - - """ - - claimer_email = email.lower().strip() - claim_url = user.get_claim_url(group._id, external=True) - - throttle = throttle or settings.GROUP_MEMBER_ADDED_EMAIL_THROTTLE - - mails.send_mail( - to_addr=claimer_email, - mail=email_template, - user=user, - group_name=group.name, - referrer_name=auth.user.fullname if auth else '', - permission=permission, - claim_url=claim_url, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - user.member_added_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - - return claimer_email - - -@unreg_member_added.connect -def finalize_invitation(group, user, permission, auth, throttle, email_template='default'): - email_template = mails.GROUP_MEMBER_UNREGISTERED_ADDED - - try: - record = user.get_unclaimed_record(group._id) - except ValueError: - pass - else: - if record['email']: - send_claim_member_email(record['email'], user, group, permission, auth=auth, throttle=throttle, email_template=email_template) - - -@group_added_to_node.connect -def notify_added_node_group_member(group, node, user, permission, auth, throttle=None): - throttle = throttle or settings.GROUP_CONNECTED_EMAIL_THROTTLE - - node_group_record = user.group_connected_email_records.get(group._id, {}) - if node_group_record: - timestamp = node_group_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return - else: - user.group_connected_email_records[group._id] = {} - - if (not auth or auth.user != user) and user.is_registered: - email_template = mails.GROUP_ADDED_TO_NODE - mails.send_mail( - to_addr=user.username, - mail=email_template, - user=user, - node=node, - all_global_subscriptions_none=check_if_all_global_subscriptions_are_none(user), - group_name=group.name, - permission=permission, - referrer_name=auth.user.fullname if auth else '', - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - - user.group_connected_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - -@group_added_to_node.connect -def subscribe_group_member(group, node, user, permission, auth, throttle=None): - try: - subscribe_user_to_notifications(node, user) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {user} to node {node._id}') - logger.warning(f'Reason: {str(err)}') diff --git a/website/project/decorators.py b/website/project/decorators.py index 2d60be5359b..427c55f3b16 100644 --- a/website/project/decorators.py +++ b/website/project/decorators.py @@ -11,7 +11,7 @@ from framework.auth.decorators import collect_auth from framework.database import get_or_http_error -from osf.models import AbstractNode, Guid, Preprint, OSFGroup, Registration +from osf.models import AbstractNode, Guid, Preprint, Registration from osf.utils.permissions import WRITE from website import language from website.util import web_url_for @@ -75,7 +75,7 @@ def wrapped(*args, **kwargs): return wrapped -def must_be_valid_project(func=None, retractions_valid=False, quickfiles_valid=False, preprints_valid=False, groups_valid=False): +def must_be_valid_project(func=None, retractions_valid=False, preprints_valid=False, groups_valid=False): """ Ensures permissions to retractions are never implicitly granted. 
""" # TODO: Check private link @@ -88,13 +88,9 @@ def wrapped(*args, **kwargs): return func(*args, **kwargs) - if groups_valid and OSFGroup.load(kwargs.get('pid')): - kwargs['node'] = OSFGroup.load(kwargs.get('pid')) - return func(*args, **kwargs) - _inject_nodes(kwargs) - if getattr(kwargs['node'], 'is_collection', True) or (getattr(kwargs['node'], 'is_quickfiles', True) and not quickfiles_valid): + if getattr(kwargs['node'], 'is_collection', True): raise HTTPError( http_status.HTTP_404_NOT_FOUND ) diff --git a/website/project/signals.py b/website/project/signals.py index 70f25418c5a..1b8b0222b88 100644 --- a/website/project/signals.py +++ b/website/project/signals.py @@ -1,8 +1,6 @@ import blinker signals = blinker.Namespace() -comment_added = signals.signal('comment-added') -mention_added = signals.signal('mention-added') contributor_added = signals.signal('contributor-added') project_created = signals.signal('project-created') contributor_removed = signals.signal('contributor-removed') diff --git a/website/project/tasks.py b/website/project/tasks.py index 62d0d79c2af..7b36a903959 100644 --- a/website/project/tasks.py +++ b/website/project/tasks.py @@ -15,7 +15,7 @@ def on_node_updated(node_id, user_id, first_save, saved_fields, request_headers= AbstractNode = apps.get_model('osf.AbstractNode') node = AbstractNode.load(node_id) - if node.is_collection or node.archiving or node.is_quickfiles: + if node.is_collection or node.archiving: return need_update = bool(node.SEARCH_UPDATE_FIELDS.intersection(saved_fields)) diff --git a/website/project/views/comment.py b/website/project/views/comment.py index eb8d6b16271..5e274052f18 100644 --- a/website/project/views/comment.py +++ b/website/project/views/comment.py @@ -9,11 +9,8 @@ from addons.base.signals import file_updated from osf.models import BaseFileNode, TrashedFileNode from osf.models import Comment -from website.notifications.constants import PROVIDERS -from website.notifications.emails import notify, notify_mentions from website.project.decorators import must_be_contributor_or_public from osf.models import Node -from website.project.signals import comment_added, mention_added @file_updated.connect @@ -107,70 +104,6 @@ def render_email_markdown(content): return markdown.markdown(content, extensions=['markdown_del_ins', 'markdown.extensions.tables', 'markdown.extensions.fenced_code']) -@comment_added.connect -def send_comment_added_notification(comment, auth, new_mentions=None): - if not new_mentions: - new_mentions = [] - node = comment.node - target = comment.target - - context = dict( - profile_image_url=auth.user.profile_image_url(), - content=render_email_markdown(comment.content), - page_type=comment.get_comment_page_type(), - page_title=comment.get_comment_page_title(), - provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '', - target_user=target.referent.user if is_reply(target) else None, - parent_comment=target.referent.content if is_reply(target) else '', - url=comment.get_comment_page_url(), - exclude=new_mentions, - ) - time_now = timezone.now() - sent_subscribers = notify( - event='comments', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - if is_reply(target): - if target.referent.user and target.referent.user._id not in sent_subscribers: - notify( - event='global_comment_replies', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - -@mention_added.connect -def send_mention_added_notification(comment, new_mentions, auth): - node = 
comment.node - target = comment.target - - context = dict( - profile_image_url=auth.user.profile_image_url(), - content=render_email_markdown(comment.content), - page_type='file' if comment.page == Comment.FILES else node.project_or_component, - page_title=comment.root_target.referent.name if comment.page == Comment.FILES else '', - provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '', - target_user=target.referent.user if is_reply(target) else None, - parent_comment=target.referent.content if is_reply(target) else '', - new_mentions=new_mentions, - url=comment.get_comment_page_url() - ) - time_now = timezone.now() - notify_mentions( - event='global_mentions', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - def is_reply(target): return isinstance(target.referent, Comment) diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 485298eb8cb..f3e06aff3fc 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -19,7 +19,7 @@ from framework.utils import get_timestamp, throttle_period_expired from osf.models import Tag from osf.exceptions import NodeStateError -from osf.models import AbstractNode, DraftRegistration, OSFGroup, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor +from osf.models import AbstractNode, DraftRegistration, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor from osf.utils import sanitize from osf.utils.permissions import ADMIN from website import mails, language, settings @@ -732,18 +732,12 @@ def claim_user_registered(auth, node, **kwargs): if should_claim: node.replace_contributor(old=unreg_user, new=current_user) node.save() - if isinstance(node, OSFGroup): - status.push_status_message( - 'You are now a member of this OSFGroup.', - kind='success', - trust=False - ) - else: - status.push_status_message( - 'You are now a contributor to this project.', - kind='success', - trust=False - ) + + status.push_status_message( + 'You are now a contributor to this project.', + kind='success', + trust=False + ) return redirect(node.url) if is_json_request(): form_ret = forms.utils.jsonify(form) diff --git a/website/project/views/node.py b/website/project/views/node.py index 9bad3713d3b..32048444c7a 100644 --- a/website/project/views/node.py +++ b/website/project/views/node.py @@ -216,10 +216,6 @@ def project_new_node(auth, node, **kwargs): else: new_component.add_contributor(contributor, permissions=perm, auth=auth) - for group in node.osf_groups: - if group.is_manager(user): - new_component.add_osf_group(group, group.get_permission_to_node(node), auth=auth) - new_component.save() redirect_url = new_component.url + 'contributors/' message = ( @@ -837,7 +833,6 @@ def _view_project(node, auth, primary=False, 'storage_location': node.osfstorage_region.name, 'waterbutler_url': node.osfstorage_region.waterbutler_url, 'mfr_url': node.osfstorage_region.mfr_url, - 'groups': list(node.osf_groups.values_list('name', flat=True)), 'storage_limit_status': get_storage_limits_css(node), }, 'parent_node': { @@ -1299,7 +1294,7 @@ def search_node(auth, **kwargs): can_view_query, title__icontains=query, is_deleted=False - ).exclude(id__in=nin).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')) + ).exclude(id__in=nin).exclude(type='osf.collection')) count = nodes.count() pages = math.ceil(count / size) diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 27a15c2c337..d6f3471dac7 100644 --- 
a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -71,7 +71,7 @@ def reviews_submit_notification_moderators(self, timestamp, context): Handle email notifications to notify moderators of new submissions or resubmission. """ # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + from osf.models import NotificationSubscriptionLegacy from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails @@ -103,7 +103,7 @@ def reviews_submit_notification_moderators(self, timestamp, context): context['message'] = f'submitted "{resource.title}".' # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscription.objects.get_or_create( + provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_new_pending_submissions', provider=provider ) @@ -138,7 +138,7 @@ def reviews_submit_notification_moderators(self, timestamp, context): @reviews_signals.reviews_withdraw_requests_notification_moderators.connect def reviews_withdraw_requests_notification_moderators(self, timestamp, context): # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + from osf.models import NotificationSubscriptionLegacy from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails @@ -146,7 +146,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): provider = resource.provider # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscription.objects.get_or_create( + provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_new_pending_withdraw_requests', provider=provider ) @@ -191,13 +191,13 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): @reviews_signals.reviews_email_withdrawal_requests.connect def reviews_withdrawal_requests_notification(self, timestamp, context): # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + from osf.models import NotificationSubscriptionLegacy from website.notifications.emails import store_emails from website.profile.utils import get_profile_image_url from website import settings # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription = NotificationSubscription.load( + provider_subscription = NotificationSubscriptionLegacy.load( '{}_new_pending_submissions'.format(context['reviewable'].provider._id)) preprint = context['reviewable'] preprint_word = preprint.provider.preprint_word diff --git a/website/routes.py b/website/routes.py index 7b0f325fa9f..1d03f538c31 100644 --- a/website/routes.py +++ b/website/routes.py @@ -1424,14 +1424,6 @@ def make_url_map(app): 'get', addon_views.addon_view_or_download_file_legacy, json_renderer - ), - Rule( - [ - '/quickfiles//' - ], - 'get', - addon_views.addon_view_or_download_quickfile, - json_renderer ) ]) diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py index 36d528c8e33..00f9e96ceb8 100644 --- a/website/search/elastic_search.py +++ b/website/search/elastic_search.py @@ -19,8 +19,6 @@ from osf.models import BaseFileNode from osf.models import GuidMetadataRecord from osf.models import Institution 
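The reviews listeners above key their moderator notifications on per-provider subscription ids (`<provider_id>_new_pending_submissions` and `<provider_id>_new_pending_withdraw_requests`). A small hypothetical helper that mirrors that convention, using the same `get_or_create` call shown in those hunks:

```python
# Hypothetical helper mirroring the provider-level subscription ids used by the
# reviews listeners in this diff; only the id convention and get_or_create call
# are taken from the changeset.
from django.apps import apps

PROVIDER_EVENTS = ('new_pending_submissions', 'new_pending_withdraw_requests')

def provider_subscription(provider, event='new_pending_submissions'):
    if event not in PROVIDER_EVENTS:
        raise ValueError(f'Unknown provider event: {event}')
    NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy')
    subscription, _created = NotificationSubscriptionLegacy.objects.get_or_create(
        _id=f'{provider._id}_{event}',
        provider=provider,
    )
    return subscription
```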
-from osf.models import OSFGroup -from osf.models import QuickFilesNode from osf.models import Preprint from osf.models import SpamStatus from addons.wiki.models import WikiPage @@ -59,7 +57,6 @@ 'institution': Institution, 'preprint': Preprint, 'collectionSubmission': CollectionSubmission, - 'group': OSFGroup } # Prevent tokenizing and stop word removal. @@ -337,8 +334,6 @@ def load_parent(parent_id): def get_doctype_from_node(node): if isinstance(node, Preprint): return 'preprint' - if isinstance(node, OSFGroup): - return 'group' if node.is_registration: return 'registration' elif node.parent_node is None: @@ -367,15 +362,6 @@ def update_preprint_async(self, preprint_id, index=None, bulk=False): except Exception as exc: self.retry(exc=exc) -@celery_app.task(bind=True, max_retries=5, default_retry_delay=60) -def update_group_async(self, group_id, index=None, bulk=False, deleted_id=None): - OSFGroup = apps.get_model('osf.OSFGroup') - group = OSFGroup.load(group_id) - try: - update_group(group=group, index=index, bulk=bulk, async_update=True, deleted_id=deleted_id) - except Exception as exc: - self.retry(exc=exc) - @celery_app.task(bind=True, max_retries=5, default_retry_delay=60) def update_user_async(self, user_id, index=None): OSFUser = apps.get_model('osf.OSFUser') @@ -400,13 +386,6 @@ def serialize_node(node, category): for x in node.contributor_set.filter(visible=True).order_by('_order') .values('user__fullname', 'user__guids___id', 'user__is_active') ], - 'groups': [ - { - 'name': x['name'], - 'url': '/{}/'.format(x['_id']) - } - for x in node.osf_groups.values('name', '_id') - ], 'title': node.title, 'normalized_title': normalized_title, 'category': category, @@ -502,7 +481,7 @@ def update_node(node, index=None, bulk=False, async_update=False): file_.update_search() is_qa_node = bool(set(settings.DO_NOT_INDEX_LIST['tags']).intersection(node.tags.all().values_list('name', flat=True))) or any(substring in node.title for substring in settings.DO_NOT_INDEX_LIST['titles']) - if node.is_deleted or not node.is_public or node.archiving or node.is_spam or (node.spam_status == SpamStatus.FLAGGED and settings.SPAM_FLAGGED_REMOVE_FROM_SEARCH) or node.is_quickfiles or is_qa_node: + if node.is_deleted or not node.is_public or node.archiving or node.is_spam or (node.spam_status == SpamStatus.FLAGGED and settings.SPAM_FLAGGED_REMOVE_FROM_SEARCH) or is_qa_node: delete_doc(node._id, node, index=index) else: category = get_doctype_from_node(node) @@ -660,18 +639,6 @@ def update_user(user, index=None): if not user.is_active: try: client().delete(index=index, doc_type='user', id=user._id, refresh=True, ignore=[404]) - # update files in their quickfiles node if the user has been marked as spam - if user.spam_status == SpamStatus.SPAM: - quickfiles = QuickFilesNode.objects.get_for_user(user) - if quickfiles: - for quickfile_id in quickfiles.files.values_list('_id', flat=True): - client().delete( - index=index, - doc_type='file', - id=quickfile_id, - refresh=True, - ignore=[404] - ) except NotFoundError: pass return @@ -730,10 +697,7 @@ def update_file(file_, index=None, delete=False): provider=file_.provider, path=file_.path, ) - if getattr(target, 'is_quickfiles', None): - node_url = f'/{target.creator._id}/quickfiles/' - else: - node_url = f'/{target._id}/' + node_url = f'/{target._id}/' guid_url = None file_guid = file_.get_guid(create=False) diff --git a/website/search_migration/__init__.py b/website/search_migration/__init__.py index d0dcc8e1a2d..d63116bcfcb 100644 --- 
a/website/search_migration/__init__.py +++ b/website/search_migration/__init__.py @@ -27,22 +27,7 @@ LEFT OUTER JOIN osf_guid AS USER_GUID ON (U.id = USER_GUID.object_id AND (USER_GUID.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser'))) WHERE (CONTRIB.node_id = N.id AND CONTRIB.visible = TRUE)) - , 'groups', (SELECT json_agg(json_build_object( - 'url', '/' || osf_osfgroup._id || '/' - , 'name', osf_osfgroup.name - )) - FROM osf_osfgroup - WHERE osf_osfgroup.id IN ( - SELECT GGOP.content_object_id AS osfgroup_id - FROM osf_osfgroupgroupobjectpermission GGOP - WHERE GGOP.group_id IN ( - SELECT DISTINCT AG.id AS osfgroup_id - FROM auth_group AG - INNER JOIN osf_nodegroupobjectpermission NGOP - ON (AG.id = NGOP.group_id) - WHERE (NGOP.content_object_id = N.id AND UPPER(AG.name::text) LIKE UPPER('%osfgroup_%')) - ) - )) + , 'groups', NULL , 'extra_search_terms', CASE WHEN strpos(N.title, '-') + strpos(N.title, '_') + strpos(N.title, '.') > 0 THEN translate(N.title, '-_.', ' ') @@ -441,7 +426,7 @@ AND name != '' AND target_object_id = ANY (SELECT id FROM osf_abstractnode - WHERE (TYPE = 'osf.node' OR TYPE = 'osf.registration' OR TYPE = 'osf.quickfilesnode') + WHERE (TYPE = 'osf.node' OR TYPE = 'osf.registration') AND is_public IS TRUE AND is_deleted IS FALSE AND (spam_status IS NULL OR NOT (spam_status = 2 or (spam_status = 1 AND {spam_flagged_removed_from_search}))) @@ -627,7 +612,7 @@ AND content_type_id = (SELECT id FROM django_content_type WHERE model = 'abstractnode') LIMIT 1 ) PARENT_GUID ON TRUE -WHERE NOT ((TYPE = 'osf.node' OR TYPE = 'osf.registration' OR TYPE = 'osf.quickfilesnode') +WHERE NOT ((TYPE = 'osf.node' OR TYPE = 'osf.registration') AND N.is_public IS TRUE AND N.is_deleted IS FALSE AND (spam_status IS NULL OR NOT (spam_status = 2 or (spam_status = 1 AND {spam_flagged_removed_from_search}))) diff --git a/website/search_migration/migrate.py b/website/search_migration/migrate.py index 7ae7f1431e4..545a43a66ac 100644 --- a/website/search_migration/migrate.py +++ b/website/search_migration/migrate.py @@ -15,7 +15,7 @@ JSON_UPDATE_FILES_SQL, JSON_DELETE_FILES_SQL, JSON_UPDATE_USERS_SQL, JSON_DELETE_USERS_SQL) from scripts import utils as script_utils -from osf.models import OSFUser, Institution, AbstractNode, BaseFileNode, Preprint, OSFGroup, CollectionSubmission +from osf.models import OSFUser, Institution, AbstractNode, BaseFileNode, Preprint, CollectionSubmission from website import settings from website.app import init_app from website.search.elastic_search import client as es_client @@ -109,15 +109,6 @@ def migrate_preprint_files(index, delete): logger.info(f'Updating page {page_number} / {paginator.num_pages}') search.bulk_update_nodes(serialize, paginator.page(page_number).object_list, index=index, category='file') -def migrate_groups(index, delete): - logger.info(f'Migrating groups to index: {index}') - groups = OSFGroup.objects.all() - increment = 100 - paginator = Paginator(groups, increment) - for page_number in paginator.page_range: - logger.info(f'Updating page {page_number} / {paginator.num_pages}') - OSFGroup.bulk_update_search(paginator.page(page_number).object_list, index=index) - def migrate_files(index, delete, increment=10000): logger.info(f'Migrating files to index: {index}') max_fid = BaseFileNode.objects.last().id @@ -217,7 +208,6 @@ def migrate(delete, remove=False, index=None, app=None): migrate_preprints(new_index, delete=delete) migrate_preprint_files(new_index, delete=delete) migrate_collected_metadata(new_index, 
delete=delete) - migrate_groups(new_index, delete=delete) set_up_alias(index, new_index) diff --git a/website/settings/defaults.py b/website/settings/defaults.py index af3b23e31ed..a9ee3085a9c 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -160,6 +160,10 @@ def parent_dir(path): MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = '' # Set this in local.py +MAILHOG_HOST = 'mailhog' +MAILHOG_PORT = 1025 +MAILHOG_API_HOST = 'http://mailhog:8025' + # OR, if using Sendgrid's API # WARNING: If `SENDGRID_WHITELIST_MODE` is True, # `tasks.send_email` would only email recipients included in `SENDGRID_EMAIL_WHITELIST` @@ -175,6 +179,7 @@ def parent_dir(path): MAILCHIMP_LIST_MAP = { MAILCHIMP_GENERAL_LIST: '123', } +NOTIFICATION_TYPES_YAML = 'notifications.yaml' #Triggered emails OSF_HELP_LIST = 'Open Science Framework Help' @@ -295,9 +300,6 @@ def parent_dir(path): # Seconds before another notification email can be sent to a contributor when added to a project CONTRIBUTOR_ADDED_EMAIL_THROTTLE = 24 * 3600 -# Seconds before another notification email can be sent to a member when added to an OSFGroup -GROUP_MEMBER_ADDED_EMAIL_THROTTLE = 24 * 3600 - # Seconds before another notification email can be sent to group members when added to a project GROUP_CONNECTED_EMAIL_THROTTLE = 24 * 3600 @@ -475,7 +477,6 @@ class CeleryConfig: 'website.archiver.tasks', 'scripts.add_missing_identifiers_to_preprints', 'osf.management.commands.approve_pending_schema_response', - 'osf.management.commands.fix_quickfiles_waterbutler_logs', 'api.share.utils', } @@ -582,8 +583,6 @@ class CeleryConfig: 'osf.management.commands.archive_registrations_on_IA', 'osf.management.commands.populate_initial_schema_responses', 'osf.management.commands.approve_pending_schema_responses', - 'osf.management.commands.delete_legacy_quickfiles_nodes', - 'osf.management.commands.fix_quickfiles_waterbutler_logs', 'osf.management.commands.sync_doi_metadata', 'api.providers.tasks', 'osf.management.commands.daily_reporters_go', @@ -749,11 +748,6 @@ class CeleryConfig: 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, - 'delete_legacy_quickfiles_nodes': { - 'task': 'osf.management.commands.delete_legacy_quickfiles_nodes', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False, 'batch_size': 10000}, - }, } # Tasks that need metrics and release requirements diff --git a/website/settings/local-ci.py b/website/settings/local-ci.py index 8bf283b6338..c63fce5a86a 100644 --- a/website/settings/local-ci.py +++ b/website/settings/local-ci.py @@ -52,6 +52,10 @@ MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = 'CHANGEME' +MAILHOG_HOST = 'localhost' +MAILHOG_PORT = 1025 +MAILHOG_API_HOST = 'http://localhost:8025' + # Session COOKIE_NAME = 'osf' SECRET_KEY = 'CHANGEME' diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py index 3c91142dcb0..212b9926f7e 100644 --- a/website/settings/local-dist.py +++ b/website/settings/local-dist.py @@ -62,6 +62,10 @@ MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = 'CHANGEME' +MAILHOG_HOST = 'mailhog' +MAILHOG_PORT = 1025 +MAILHOG_API_HOST = 'http://mailhog:8025' + # Mailchimp email subscriptions ENABLE_EMAIL_SUBSCRIPTIONS = False diff --git a/website/signals.py b/website/signals.py index c1b8660dcd4..3d1d2233adf 100644 --- a/website/signals.py +++ b/website/signals.py @@ -3,13 +3,10 @@ from framework.auth import signals as auth from website.project import signals as project from addons.base import signals as event -from 
website.conferences import signals as conference from website.reviews import signals as reviews ALL_SIGNALS = [ # TODO: Fix - project.comment_added, - project.mention_added, project.unreg_contributor_added, project.contributor_added, project.contributor_removed, @@ -23,6 +20,5 @@ auth.user_account_merged, auth.unconfirmed_user_created, event.file_updated, - conference.osf4m_user_created, reviews.reviews_email ] diff --git a/website/static/js/anonymousLogActionsList.json b/website/static/js/anonymousLogActionsList.json index e047fbdfc29..17642a945f6 100644 --- a/website/static/js/anonymousLogActionsList.json +++ b/website/static/js/anonymousLogActionsList.json @@ -92,7 +92,6 @@ "subjects_updated": "A user updated the subjects", "view_only_link_added": "A user created a view-only link to a project", "view_only_link_removed": "A user removed a view-only link to a project", - "migrated_quickfiles": "QuickFiles were migrated into a public project", "resource_identifier_added": "A Resource has been added to the Node", "resource_identifier_removed": "A Resource has been removed from the Node", "resource_identifier_updated": "A Resource on the Node has had its PID updated" diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js deleted file mode 100644 index 3515d61da34..00000000000 --- a/website/static/js/components/quickFiles.js +++ /dev/null @@ -1,150 +0,0 @@ -'use strict'; - -var m = require('mithril'); // exposes mithril methods, useful for redraw etc. -var $osf = require('js/osfHelpers'); -var iconmap = require('js/iconmap'); -var lodashFind = require('lodash.find'); -var mHelpers = require('js/mithrilHelpers'); -var Raven = require('raven-js'); - -var withPagination = require('js/components/pagination').withPagination; - -var QUICKFILES_PAGE_SIZE = 10; - - -var _buildUrl = function(page, user) { - - var query = { - 'page[size]': QUICKFILES_PAGE_SIZE, - 'page': page || 1, - 'version': '2.2', - }; - - return $osf.apiV2Url('users/' + user + '/quickfiles/', { query: query}); -}; - - -var _getNextItems = function(ctrl, url, updatePagination) { - if(ctrl.requestPending()) { - return; - } - - ctrl.quickFiles([]); - ctrl.requestPending(true); - - var promise = m.request({ - method : 'GET', - url : url, - background : true, - config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain}) - }); - - promise.then( - function(result) { - ctrl.requestPending(false); - ctrl.quickFiles(result.data); - updatePagination(result, url); - m.redraw(); - return promise; - }, function(xhr, textStatus, error) { - ctrl.failed = true; - ctrl.requestPending(false); - m.redraw(); - Raven.captureMessage('Error retrieving quickfiles', { - extra: { - url: url, - textStatus: textStatus, - error: error - } - }); - } - ); -}; - - -var QuickFile = { - - controller: function(options) { - var self = this; - self.file = options.file; - self.icon = iconmap.file; - }, - - view: function(ctrl) { - var viewBase = window.location.origin; - var viewUrl = ctrl.file.attributes.guid ? 
viewBase + '/' + ctrl.file.attributes.guid : viewBase + '/quickfiles' + ctrl.file.attributes.path; - return m('div', [ - m('li.project list-group-item list-group-item-node cite-container', [ - m('p.list-group-item-heading', [ - m('span.component-overflow.f-w-lg', {style: {lineHeight: 1.5, width: '100%'}}, [ - m('span.col-md-8.project-statuses-lg', [ - m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''), - m('a', {'href': viewUrl, - onclick : function () { - $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page'); - } - }, ctrl.file.attributes.name), - ]) - ]) - ]) - ]) - ]); - } -}; - -var QuickFiles = { - - controller: function (options) { - var self = this; - self.failed = false; - self.user = options.user._id; - self.isProfile = options.user.is_profile; - - self.quickFiles = m.prop([]); - self.requestPending = m.prop(false); - - self.getCurrentQuickFiles = function _getCurrentQuickFiles(page) { - if (!self.requestPending()) { - var url = _buildUrl(page, self.user); - return _getNextItems(self, url, options.updatePagination); - } - }; - self.getCurrentQuickFiles(); - }, - - view: function (ctrl) { - - return m('p.list-group m-md', [ - // Error message if the request fails - ctrl.failed ? m('p', [ - 'Unable to retrieve quickfiles at this time. Please refresh the page or contact ', - m('a', {'href': 'mailto:support@osf.io'}, 'support@osf.io'), - ' if the problem persists.' - ]) : - - // Show laoding icon while there is a pending request - ctrl.requestPending() ? m('.ball-pulse.ball-scale-blue.text-center', [m(''), m(''), m('')]) : - - // Display each quickfile - [ - ctrl.quickFiles().length !== 0 ? ctrl.quickFiles().map(function(file) { - return m.component(QuickFile, {file: file}); - }) : ctrl.isProfile ? - m('div.help-block', {}, 'You have no public quickfiles') - : m('div.help-block', {}, 'This user has no public quickfiles.') - ] - ]); - } -}; - -var PaginationWrapper = withPagination({ - buildUrl: _buildUrl, - getNextItems: _getNextItems -}); - -QuickFiles = new PaginationWrapper(QuickFiles); - - -module.exports = { - QuickFiles: QuickFiles -}; diff --git a/website/static/js/logActionsList.json b/website/static/js/logActionsList.json index 53c5ef02f04..4b17c8c855c 100644 --- a/website/static/js/logActionsList.json +++ b/website/static/js/logActionsList.json @@ -104,7 +104,6 @@ "prereg_links_updated": "${user} has updated their preregistration data links", "why_no_prereg_updated": "${user} has updated their preregistration data availability statement", "prereg_links_info_updated": "${user} has updated their preregistration links to ${value}", - "migrated_quickfiles": "${user} had their QuickFiles migrated into ${node}", "resource_identifier_added": "${user} has added a Resource with DOI ${new_identifier} to Registration ${node}", "resource_identifier_removed": "${user} has removed a Resource with DOI ${obsolete_identifier} to Registration ${node}", "resource_identifier_updated": "${user} has updated a Resource DOI on Registration ${node} from ${obsolete_identifier} to ${new_identifier}" diff --git a/website/static/js/pages/profile-page.js b/website/static/js/pages/profile-page.js index bb457f9a497..2df42fb3aaa 100644 --- a/website/static/js/pages/profile-page.js +++ b/website/static/js/pages/profile-page.js @@ -10,7 +10,6 @@ require('../project.js'); // Needed for nodelists to work require('../components/logFeed.js'); // Needed for nodelists to work var profile = require('../profile.js'); // Social, Job, Education classes var publicNodes = 
require('../components/publicNodes.js'); -var quickFiles = require('../components/quickFiles.js'); var ctx = window.contextVars; // Instantiate all the profile modules diff --git a/website/templates/emails/comment_replies.html.mako b/website/templates/emails/comment_replies.html.mako deleted file mode 100644 index ab0cc25a0d1..00000000000 --- a/website/templates/emails/comment_replies.html.mako +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - -
    avatar - ${user.fullname} - replied to your comment "${parent_comment}" on your ${provider + ' ' if page_type == 'file' else ''}${page_type} - %if page_type == 'file' or page_type == 'wiki': - ${page_title} - %endif - at ${localized_timestamp}: - ${content} -
    diff --git a/website/templates/emails/comments.html.mako b/website/templates/emails/comments.html.mako deleted file mode 100644 index 2537189bb09..00000000000 --- a/website/templates/emails/comments.html.mako +++ /dev/null @@ -1,19 +0,0 @@ -<% from osf.models import OSFUser %> - - - - - - - -
    avatar - ${user.fullname} - commented on your ${provider + ' ' if page_type == 'file' else ''}${page_type} - %if page_type == 'file' or page_type == 'wiki': - ${page_title} - %endif - at ${localized_timestamp}: - ${content} -
    diff --git a/website/templates/emails/conference_deprecation.html.mako b/website/templates/emails/conference_deprecation.html.mako deleted file mode 100644 index 4453c4db36d..00000000000 --- a/website/templates/emails/conference_deprecation.html.mako +++ /dev/null @@ -1,17 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
    -
    - You recently attempted to interact with the Meeting service via email, but this service has been discontinued and is no longer available for new interactions.
    -
    - Existing meetings and past submissions remain unchanged. If you have any questions or need further assistance, please contact our support team at [ ${support_email} ].
    -
    - Sincerely yours,
    -
    - The OSF Robot
    - - - \ No newline at end of file diff --git a/website/templates/emails/conference_failed.html.mako b/website/templates/emails/conference_failed.html.mako deleted file mode 100644 index c64e44f210e..00000000000 --- a/website/templates/emails/conference_failed.html.mako +++ /dev/null @@ -1,16 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
    -
    - You recently tried to create a project on the Open Science Framework via email, but your message did not contain any file attachments. Please try again, making sure to attach the files you'd like to upload to your message.
    -
    - - Sincerely yours,
    -
    - The OSF Robot
    - - - diff --git a/website/templates/emails/conference_inactive.html.mako b/website/templates/emails/conference_inactive.html.mako deleted file mode 100644 index f5547a50b06..00000000000 --- a/website/templates/emails/conference_inactive.html.mako +++ /dev/null @@ -1,15 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
    -
    - You recently tried to create a project on the Open Science Framework via email, but the conference you attempted to submit to is not currently accepting new submissions. For a list of conferences, see [ ${presentations_url} ].
    -
    - Sincerely yours,
    -
    - The OSF Robot
    - - - diff --git a/website/templates/emails/conference_submitted.html.mako b/website/templates/emails/conference_submitted.html.mako deleted file mode 100644 index 60f190cf353..00000000000 --- a/website/templates/emails/conference_submitted.html.mako +++ /dev/null @@ -1,34 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
    -
    - Congratulations! You have successfully added your ${conf_full_name} ${presentation_type} to OSF.
    -
    - % if user_created: - Your account on OSF has been created. To claim your account, please create a password by clicking here: ${set_password_url}. Please verify your profile information at: ${profile_url}.
    -
    - % endif - You now have a permanent, citable URL, that you can share: ${node_url}. All submissions for ${conf_full_name} may be viewed at the following link: ${conf_view_url}.
    -
    - % if is_spam: - Your email was flagged as spam by our mail processing service. To prevent potential spam, we have made your project private. If this is a real project, please log in to your account, browse to your project, and click the "Make Public" button so that other users can view it.
    -
    - % endif - Get more from OSF by enhancing your project with the following:
    -
    - * Collaborators/contributors to the submission
    - * Charts, graphs, and data that didn't make it onto the submission
    - * Links to related publications or reference lists
    - * Connecting other accounts, like Dropbox, Google Drive, GitHub, figshare and Mendeley via add-on integration. Learn more and read the full list of available add-ons here.
    -
    - To learn more about OSF, read the Guides.
    -
    - Sincerely,
    -
    - The OSF Team
    - - - diff --git a/website/templates/emails/confirm_agu_conference.html.mako b/website/templates/emails/confirm_agu_conference.html.mako deleted file mode 100644 index 603e2c39e8d..00000000000 --- a/website/templates/emails/confirm_agu_conference.html.mako +++ /dev/null @@ -1,26 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${user.fullname},
    -
    - - Thank you for joining us at the AGU Open Science Pavilion, and welcome to the Open Science Framework (OSF). - - We are pleased to offer a special AGU attendees exclusive 1:1 consultation to continue our conversation and to help - you get oriented on the OSF. This is an opportunity for us to show you useful OSF features, talk about - open science in Earth and space sciences, and for you to ask any questions you may have. - You can sign up to participate by completing this form, and a member of our team will be in touch to - determine your availability: -
    - https://docs.google.com/forms/d/e/1FAIpQLSeJ23YPaEMdbLY1OqbcP85Tt6rhLpFoOtH0Yg4vY_wSKULRcw/viewform?usp=sf_link -

    - To confirm your OSF account, please verify your email address by visiting this link:
    -
    - ${confirmation_url}
    -
    - From the team at the Center for Open Science
    - - - diff --git a/website/templates/emails/confirm_agu_conference_2023.html.mako b/website/templates/emails/confirm_agu_conference_2023.html.mako deleted file mode 100644 index 429ec911410..00000000000 --- a/website/templates/emails/confirm_agu_conference_2023.html.mako +++ /dev/null @@ -1,25 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${user.fullname},
    -
    - - Thank you for joining us at the AGU Open Science Pavilion, and welcome to the Open Science Framework. - - We are pleased to offer a special AGU attendees exclusive community call to continue our conversation and to help - you get oriented on the OSF. This is an opportunity for us to show you useful OSF features, talk about - open science in your domains, and for you to ask any questions you may have. - You can register for this free event here: -
    - https://cos-io.zoom.us/meeting/register/tZAuceCvrjotHNG3n6XzLFDv1Rnn2hkjczHr -

    - To confirm your OSF account, please verify your email address by visiting this link:
    -
    - ${confirmation_url}
    -
    - From the team at the Center for Open Science
    - - - diff --git a/website/templates/emails/group_added_to_node.html.mako b/website/templates/emails/group_added_to_node.html.mako deleted file mode 100644 index cf4f42aa102..00000000000 --- a/website/templates/emails/group_added_to_node.html.mako +++ /dev/null @@ -1,23 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - - <%! - from website import settings - %> - Hello ${user.fullname},
    -
    - ${referrer_name + ' has given your group, ' + group_name + ',' if referrer_name else 'Your group, ' + group_name + ', has been given'} ${permission} permissions to the project "${node.title}" on OSF: ${node.absolute_url}
    -
    - You will ${'not receive ' if all_global_subscriptions_none else 'be automatically subscribed to '}notification emails for this project. To change your email notification preferences, visit your project or your user settings: ${settings.DOMAIN + "settings/notifications/"}
    -
    - Sincerely,
    -
    - Open Science Framework Robot
    -
    - Want more information? Visit https://osf.io/ to learn about the Open Science Framework, or https://cos.io/ for information about its supporting organization, the Center for Open Science.
    -
    - Questions? Email ${osf_contact_email}
    - - - diff --git a/website/templates/emails/group_member_added.html.mako b/website/templates/emails/group_member_added.html.mako deleted file mode 100644 index 2b8532f0190..00000000000 --- a/website/templates/emails/group_member_added.html.mako +++ /dev/null @@ -1,24 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%! - from website import settings - %> - Hello ${user.fullname},
    -
    - ${referrer_name + ' has added you' if referrer_name else 'You have been added'} as a ${permission} of the group "${group_name}" on OSF.
    -
    - If you have erroneously been added to the group "${group_name}," please contact a group administrator.
    -
    - Sincerely,
    -
    - Open Science Framework Robot
    -
    - Want more information? Visit https://osf.io/ to learn about the Open Science Framework, or https://cos.io/ for information about its supporting organization, the Center for Open Science.
    -
    - Questions? Email ${osf_contact_email}
    - - - diff --git a/website/templates/emails/group_member_unregistered_added.html.mako b/website/templates/emails/group_member_unregistered_added.html.mako deleted file mode 100644 index bc9dee5cc24..00000000000 --- a/website/templates/emails/group_member_unregistered_added.html.mako +++ /dev/null @@ -1,24 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%! - from website import settings - %> - Hello ${user.fullname},
    -
    - ${referrer_name + ' has added you' if referrer_name else 'You have been added'} to the group "${group_name}" on OSF. To set a password for your account, visit:
    -
    - ${claim_url}
    -
    - Once you have set a password, you will be able to create your own groups and projects. -
    - If you are not ${user.fullname} or you are erroneously being associated with "${group_name}," please email ${osf_contact_email} with the subject line "Claiming Error" to report the problem.
    -
    - Sincerely,
    -
    - The OSF Team
    -
    - - diff --git a/website/templates/emails/quickfiles_migrated.html.mako b/website/templates/emails/quickfiles_migrated.html.mako deleted file mode 100644 index 94948b2a066..00000000000 --- a/website/templates/emails/quickfiles_migrated.html.mako +++ /dev/null @@ -1,31 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%!from website import settings%> - Hello ${user.fullname}, -

    - The Quick Files feature has been discontinued and your files have been migrated into an OSF Project. You can find the new Project on your My Projects page, entitled "${user.fullname}'s Quick Files". Your favorite Quick Files features are still present; you can view, download, and share your files from their new location. Your file URL's will also continue to resolve properly, and you can still move your files between Projects by linking your Projects. Contact ${settings.OSF_CONTACT_EMAIL} if you have any questions or concerns. -

    -

    - Thank you for partnering with us as a stakeholder in open science and in the success of the infrastructure that help make it possible. -

    -

    - The Center for Open Science Team -

    -

    - Sincerely,
    - The OSF Team -

    -

    - Want more information? Visit ${settings.DOMAIN} to learn about the OSF, - or https://cos.io/ for information about its supporting organization, - the Center for Open Science. -

    -

    - Questions? Email ${settings.OSF_CONTACT_EMAIL} -

    - - - diff --git a/website/templates/include/profile/names.mako b/website/templates/include/profile/names.mako index 12449628081..b20e18773c6 100644 --- a/website/templates/include/profile/names.mako +++ b/website/templates/include/profile/names.mako @@ -4,8 +4,8 @@
    - ## Maxlength for full names must be 186 - quickfile titles use fullname + 's Quick Files - + ## Maxlength for full names must be 186 +

    diff --git a/website/templates/project/project.mako b/website/templates/project/project.mako index bcba7b24ff6..edd88d87d65 100644 --- a/website/templates/project/project.mako +++ b/website/templates/project/project.mako @@ -171,20 +171,6 @@ % endif
    - % if node['groups']: -
    - Groups: - %for i, group_name in enumerate(node['groups']): -
      - % if i == len(node['groups']) - 1: - ${group_name} - % else: - ${group_name}, - % endif -
    - %endfor -
    - % endif % if enable_institutions and not node['anonymous']: % if (permissions.ADMIN in user['permissions'] and not node['is_registration']) and (len(node['institutions']) != 0 or len(user['institutions']) != 0): Affiliated Institutions: diff --git a/website/templates/public/register.mako b/website/templates/public/register.mako index 05ccb59efa8..290c991a425 100644 --- a/website/templates/public/register.mako +++ b/website/templates/public/register.mako @@ -91,7 +91,7 @@
    - ## Maxlength for full names must be 186 - quickfile titles use fullname + 's Quick Files + ## Maxlength for full names must be 186
    diff --git a/website/templates/search.mako b/website/templates/search.mako index 78ec1b10e3d..bad65e38d81 100644 --- a/website/templates/search.mako +++ b/website/templates/search.mako @@ -249,16 +249,6 @@

    - -

    - Groups: - - - -  - - -

    -

    Affiliated institutions: diff --git a/website/templates/util/render_node.mako b/website/templates/util/render_node.mako index 69af32bb624..0c2cb4c51ce 100644 --- a/website/templates/util/render_node.mako +++ b/website/templates/util/render_node.mako @@ -100,11 +100,6 @@

    ${contributor_list.render_contributors(contributors=summary['contributors'], others_count=summary['others_count'], node_url=summary['url'])}
    - % if summary['groups']: -
    - ${summary['groups']} -
    - % endif % else:
    Anonymous Contributors
    % endif diff --git a/website/views.py b/website/views.py index 72a69f0bd26..07d16a22abe 100644 --- a/website/views.py +++ b/website/views.py @@ -67,22 +67,6 @@ def serialize_contributors_for_summary(node, max_count=3): 'others_count': others_count, } -def serialize_groups_for_summary(node): - groups = node.osf_groups - n_groups = len(groups) - group_string = '' - for index, group in enumerate(groups): - if index == n_groups - 1: - separator = '' - elif index == n_groups - 2: - separator = ' & ' - else: - separator = ', ' - - group_string = group_string + group.name + separator - - return group_string - def serialize_node_summary(node, auth, primary=True, show_path=False): is_registration = node.is_registration @@ -140,7 +124,6 @@ def serialize_node_summary(node, auth, primary=True, show_path=False): 'show_path': show_path, 'contributors': contributor_data['contributors'], 'others_count': contributor_data['others_count'], - 'groups': serialize_groups_for_summary(node), 'description': node.description if len(node.description) <= 150 else node.description[0:150] + '...', }) else: