diff --git a/admin/draft_registrations/urls.py b/admin/draft_registrations/urls.py index 84d8ef6525c..abc5b18091f 100644 --- a/admin/draft_registrations/urls.py +++ b/admin/draft_registrations/urls.py @@ -9,4 +9,6 @@ urlpatterns = [ re_path(r'^$', views.UserDraftRegistrationSearchView.as_view(), name='search'), re_path(r'^(?P<draft_registration_id>\w+)/$', views.DraftRegistrationView.as_view(), name='detail'), + re_path(r'^(?P<draft_registration_id>\w+)/modify_storage_usage/$', views.DraftRegisrationModifyStorageUsage.as_view(), + name='adjust-draft-registration-storage-usage'), ] diff --git a/admin/draft_registrations/views.py b/admin/draft_registrations/views.py index 165407e3cc3..c49abdadaee 100644 --- a/admin/draft_registrations/views.py +++ b/admin/draft_registrations/views.py @@ -1,18 +1,35 @@ from django.urls import NoReverseMatch, reverse from django.contrib import messages from django.contrib.auth.mixins import PermissionRequiredMixin +from django.db.models import F, Case, When, IntegerField from django.shortcuts import redirect from django.views.generic import FormView from django.views.generic import DetailView from admin.base.forms import GuidForm +from website import settings + from osf.models.registrations import DraftRegistration class DraftRegistrationMixin(PermissionRequiredMixin): def get_object(self): - draft_registration = DraftRegistration.load(self.kwargs['draft_registration_id']) + draft_registration = DraftRegistration.objects.filter( + _id=self.kwargs['draft_registration_id'] + ).annotate( + cap=Case( + When( + custom_storage_usage_limit=None, + then=settings.STORAGE_LIMIT_PRIVATE, + ), + When( + custom_storage_usage_limit__gt=0, + then=F('custom_storage_usage_limit'), + ), + output_field=IntegerField() + ) + ).first() draft_registration.guid = draft_registration._id return draft_registration @@ -52,3 +69,23 @@ def get_context_data(self, **kwargs): return super().get_context_data(**{ 'draft_registration': draft_registration }, **kwargs) + + +class 
DraftRegisrationModifyStorageUsage(DraftRegistrationMixin, DetailView): + template_name = 'draft_registrations/detail.html' + permission_required = 'osf.change_draftregistration' + + def post(self, request, *args, **kwargs): + draft = self.get_object() + new_cap = request.POST.get('cap-input') + + draft_cap = draft.custom_storage_usage_limit or settings.STORAGE_LIMIT_PRIVATE + if float(new_cap) <= 0: + messages.error(request, 'Draft registration should have a positive storage limit') + return redirect(self.get_success_url()) + + if float(new_cap) != draft_cap: + draft.custom_storage_usage_limit = new_cap + + draft.save() + return redirect(self.get_success_url()) diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 40cf261945d..f76db366f97 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -636,6 +636,9 @@ def post(self, request, *args, **kwargs): node_private_cap = node.custom_storage_usage_limit_private or settings.STORAGE_LIMIT_PRIVATE node_public_cap = node.custom_storage_usage_limit_public or settings.STORAGE_LIMIT_PUBLIC + if float(new_private_cap) <= 0 or float(new_public_cap) <= 0: + messages.error(request, 'Node should have positive storage limits') + return redirect(self.get_success_url()) if float(new_private_cap) != node_private_cap: node.custom_storage_usage_limit_private = new_private_cap diff --git a/admin/templates/draft_registrations/detail.html b/admin/templates/draft_registrations/detail.html index e20be95469a..6c88c5c9c76 100644 --- a/admin/templates/draft_registrations/detail.html +++ b/admin/templates/draft_registrations/detail.html @@ -54,12 +54,7 @@

Draft Registration: {{ draft_registration.title }} - Node storage usage - - Current usage: {{ draft_registration.storage_usage }}
- - + {% include "draft_registrations/storage_usage.html" with draft_registration=draft_registration %} diff --git a/admin/templates/draft_registrations/storage_usage.html b/admin/templates/draft_registrations/storage_usage.html new file mode 100644 index 00000000000..47c944f6d6a --- /dev/null +++ b/admin/templates/draft_registrations/storage_usage.html @@ -0,0 +1,33 @@ +{% load node_extras %} + + + Draft registration storage usage + + Public and private cap: {{ draft_registration.cap|floatformat:0 }} GB
+
+ Modify Storage Caps + + + + diff --git a/osf/migrations/0031_draftregistration_custom_storage_usage_limit.py b/osf/migrations/0031_draftregistration_custom_storage_usage_limit.py new file mode 100644 index 00000000000..c4113159753 --- /dev/null +++ b/osf/migrations/0031_draftregistration_custom_storage_usage_limit.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.15 on 2025-06-24 13:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0030_abstractnode__manual_guid'), + ] + + operations = [ + migrations.AddField( + model_name='draftregistration', + name='custom_storage_usage_limit', + field=models.DecimalField(blank=True, decimal_places=9, max_digits=100, null=True), + ), + ] diff --git a/osf/models/registrations.py b/osf/models/registrations.py index e62bf5f14bf..b6a1e6fa4d3 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -992,6 +992,8 @@ class DraftRegistration(ObjectIDMixin, RegistrationResponseMixin, DirtyFieldsMix default=get_default_id, ) + custom_storage_usage_limit = models.DecimalField(decimal_places=9, max_digits=100, null=True, blank=True) + # Dictionary field mapping question id to a question's comments and answer # { # : { diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 59c178b839d..ab25a15c6b5 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -463,7 +463,7 @@ def test_stat_addon(self): @mock.patch('website.archiver.tasks.archive_addon.delay') def test_archive_node_pass(self, mock_archive_addon): - settings.MAX_ARCHIVE_SIZE = 1024 ** 3 + settings.STORAGE_LIMIT_PRIVATE = 1 # 1gb with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_file_tree: mock_file_tree.return_value = FILE_TREE results = [stat_addon(addon, self.archive_job._id) for addon in ['osfstorage']] @@ -474,8 +474,9 @@ def test_archive_node_pass(self, mock_archive_addon): ) @use_fake_addons - def test_archive_node_fail(self): - 
settings.MAX_ARCHIVE_SIZE = 100 + @mock.patch('website.archiver.tasks.archive_addon.delay') + def test_archive_node_fail(self, mock_archive_addon): + settings.STORAGE_LIMIT_PRIVATE = 500 / 1024 ** 3 # 500 KB results = [stat_addon(addon, self.archive_job._id) for addon in ['osfstorage', 'dropbox']] with pytest.raises(ArchiverSizeExceeded): # Note: Requires task_eager_propagates = True in celery archive_node.apply(args=(results, self.archive_job._id)) @@ -503,7 +504,7 @@ def empty_file_tree(user, version): @use_fake_addons @mock.patch('website.archiver.tasks.archive_addon.delay') def test_archive_node_no_archive_size_limit(self, mock_archive_addon): - settings.MAX_ARCHIVE_SIZE = 100 + settings.STORAGE_LIMIT_PRIVATE = 100 / 1024 ** 3 # 100KB self.archive_job.initiator.add_system_tag(NO_ARCHIVE_LIMIT) self.archive_job.initiator.save() with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_file_tree: @@ -515,6 +516,68 @@ def test_archive_node_no_archive_size_limit(self, mock_archive_addon): job_pk=self.archive_job._id, ) + @use_fake_addons + @mock.patch('website.archiver.tasks.archive_addon.delay') + def test_archive_node_fail_and_use_updated_storage_size_limit(self, mock_archive_addon): + self.src.is_public = True + self.src.save() + draft_reg = DraftRegistration.objects.get(registered_node=self.dst) + draft_reg.custom_storage_usage_limit = 2 + draft_reg.save() + with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_file_tree: + FILE_TREE['children'][0]['size'] = 1024 ** 3 + 1 # 1GB + 1 kilobyte + mock_file_tree.return_value = FILE_TREE + results = [stat_addon(addon, self.archive_job._id) for addon in ['osfstorage', 'dropbox']] + + with self.assertRaises(ArchiverSizeExceeded): + archive_node(results, self.archive_job._id) + + FILE_TREE['children'][0]['size'] = '128' + + @use_fake_addons + @mock.patch('website.archiver.tasks.archive_addon.delay') + def test_archive_node_success_and_use_updated_storage_size_limit(self, mock_archive_addon): + 
self.src.is_public = True + self.src.save() + draft_reg = DraftRegistration.objects.get(registered_node=self.dst) + draft_reg.custom_storage_usage_limit = 3 + draft_reg.save() + with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_file_tree: + FILE_TREE['children'][0]['size'] = 1024 ** 3 # 1GB + mock_file_tree.return_value = FILE_TREE + results = [stat_addon(addon, self.archive_job._id) for addon in ['osfstorage', 'dropbox']] + + archive_node(results, self.archive_job._id) + FILE_TREE['children'][0]['size'] = '128' + + @use_fake_addons + @mock.patch('website.archiver.tasks.archive_addon.delay') + def test_archive_node_fail_and_use_default_storage_size_limit(self, mock_archive_addon): + self.src.is_public = True + self.src.save() + with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_file_tree: + settings.STORAGE_LIMIT_PRIVATE = 4 + FILE_TREE['children'][0]['size'] = 1024 ** 3 * 2 # 2GB + mock_file_tree.return_value = FILE_TREE + results = [stat_addon(addon, self.archive_job._id) for addon in ['osfstorage', 'dropbox']] + + with self.assertRaises(ArchiverSizeExceeded): + archive_node(results, self.archive_job._id) + + FILE_TREE['children'][0]['size'] = '128' + + @use_fake_addons + @mock.patch('website.archiver.tasks.archive_addon.delay') + def test_archive_node_success_and_use_default_private_storage_size_limit(self, mock_archive_addon): + with mock.patch.object(BaseStorageAddon, '_get_file_tree') as mock_file_tree: + settings.STORAGE_LIMIT_PRIVATE = 4 + FILE_TREE['children'][0]['size'] = 1024 ** 3 # 1GB + mock_file_tree.return_value = FILE_TREE + results = [stat_addon(addon, self.archive_job._id) for addon in ['osfstorage', 'dropbox']] + + archive_node(results, self.archive_job._id) + FILE_TREE['children'][0]['size'] = '128' + @mock.patch('website.archiver.tasks.make_copy_request.delay') def test_archive_addon(self, mock_make_copy_request): archive_addon('osfstorage', self.archive_job._id) @@ -787,6 +850,7 @@ def 
test_handle_archive_fail_size(self, mock_send_mail): stat_result={}, can_change_preferences=False, url=url, + draft_registration=DraftRegistration.objects.get(registered_node=self.dst) ) mock_send_mail.assert_has_calls([ call(**args_user), diff --git a/website/archiver/tasks.py b/website/archiver/tasks.py index f8c3b18feb1..bbe1a1e1ed8 100644 --- a/website/archiver/tasks.py +++ b/website/archiver/tasks.py @@ -279,21 +279,26 @@ def archive_node(stat_results, job_pk): dst.title, targets=stat_results ) - if (NO_ARCHIVE_LIMIT not in job.initiator.system_tags) and (stat_result.disk_usage > settings.MAX_ARCHIVE_SIZE): + + draft_registration = DraftRegistration.objects.get(registered_node=dst) + disk_usage_in_gb = stat_result.disk_usage / 1024 ** 3 + limit = draft_registration.custom_storage_usage_limit or settings.STORAGE_LIMIT_PRIVATE + + if (NO_ARCHIVE_LIMIT not in job.initiator.system_tags) and (disk_usage_in_gb > limit): raise ArchiverSizeExceeded(result=stat_result) - else: - if not stat_result.targets: - job.status = ARCHIVER_SUCCESS - job.save() - for result in stat_result.targets: - if not result['num_files']: - job.update_target(result['target_name'], ARCHIVER_SUCCESS) - else: - archive_addon.delay( - addon_short_name=result['target_name'], - job_pk=job_pk - ) - project_signals.archive_callback.send(dst) + + if not stat_result.targets: + job.status = ARCHIVER_SUCCESS + job.save() + for result in stat_result.targets: + if not result['num_files']: + job.update_target(result['target_name'], ARCHIVER_SUCCESS) + else: + archive_addon.delay( + addon_short_name=result['target_name'], + job_pk=job_pk + ) + project_signals.archive_callback.send(dst) def archive(job_pk): diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 44cd7517413..f54284d033f 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -28,7 +28,7 @@ def normalize_unicode_filenames(filename): ] -def send_archiver_size_exceeded_mails(src, user, stat_result, url): +def 
send_archiver_size_exceeded_mails(src, user, stat_result, url, draft_registration): mails.send_mail( to_addr=settings.OSF_SUPPORT_EMAIL, mail=mails.ARCHIVE_SIZE_EXCEEDED_DESK, @@ -37,6 +37,7 @@ def send_archiver_size_exceeded_mails(src, user, stat_result, url): stat_result=stat_result, can_change_preferences=False, url=url, + draft_registration=draft_registration ) mails.send_mail( to_addr=user.username, @@ -106,11 +107,14 @@ def send_archiver_uncaught_error_mails(src, user, results, url): def handle_archive_fail(reason, src, dst, user, result): + from osf.models import DraftRegistration + url = settings.INTERNAL_DOMAIN + src._id if reason == ARCHIVER_NETWORK_ERROR: send_archiver_copy_error_mails(src, user, result, url) elif reason == ARCHIVER_SIZE_EXCEEDED: - send_archiver_size_exceeded_mails(src, user, result, url) + draft_registration = DraftRegistration.objects.get(registered_node=dst) + send_archiver_size_exceeded_mails(src, user, result, url, draft_registration) elif reason == ARCHIVER_FILE_NOT_FOUND: send_archiver_file_not_found_mails(src, user, result, url) elif reason == ARCHIVER_FORCED_FAILURE: # Forced failure using scripts.force_fail_registration diff --git a/website/settings/defaults.py b/website/settings/defaults.py index d891e886873..fb3b1b89272 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -372,8 +372,6 @@ def parent_dir(path): ###### ARCHIVER ########### ARCHIVE_PROVIDER = 'osfstorage' -MAX_ARCHIVE_SIZE = 5 * 1024 ** 3 # == math.pow(1024, 3) == 1 GB - ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours STUCK_FILES_DELETE_TIMEOUT = timedelta(days=45) # Registration files stuck for x days are marked as deleted. 
diff --git a/website/templates/emails/archive_size_exceeded_desk.html.mako b/website/templates/emails/archive_size_exceeded_desk.html.mako index 8b4376c1c0d..801b69c244e 100644 --- a/website/templates/emails/archive_size_exceeded_desk.html.mako +++ b/website/templates/emails/archive_size_exceeded_desk.html.mako @@ -9,14 +9,8 @@ - User: ${user.fullname} (${user.username}) [${user._id}] - - Tried to register ${src.title} (${url}), but the resulting archive would have exceeded our caps for disk usage (${settings.MAX_ARCHIVE_SIZE / 1024 ** 3}GB). -
- - A report is included below: - -
    ${str(stat_result)}
+ Hi, + We couldn’t complete the registration for ${src.title} because its size exceeds the limit of ${'{:g}'.format(float(draft_registration.custom_storage_usage_limit or settings.STORAGE_LIMIT_PRIVATE))}GB.