diff --git a/CHANGELOG b/CHANGELOG index 3f013526f33..da406c46bf7 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,13 @@ We follow the CalVer (https://calver.org/) versioning scheme: YY.MINOR.MICRO. +19.30.0 (2019-10-16) +=================== +- Fix weirdness around deleted nodes by not deleting OSF Storage +- Make deleted fields on models and addons into date fields +- API v2: Chronos users can have more name options +- Python 3 backwards compatibility changes + 19.29.0 (2019-10-02) =================== - Use new pagecounter fields for increased query efficiency diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index 32b05fd4ce6..3b541818725 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -15,11 +15,14 @@ ## QA Notes - ## Documentation diff --git a/addons/base/logger.py b/addons/base/logger.py index a8c40ad6ca1..a09abc15df0 100644 --- a/addons/base/logger.py +++ b/addons/base/logger.py @@ -13,7 +13,7 @@ def addon_short_name(self): pass def _log_params(self): - node_settings = self.node.get_addon(self.addon_short_name, deleted=True) + node_settings = self.node.get_addon(self.addon_short_name, is_deleted=True) return { 'project': self.node.parent_id, 'node': self.node._primary_key, diff --git a/addons/base/models.py b/addons/base/models.py index 33aad7b99c3..e39cd10f1e9 100644 --- a/addons/base/models.py +++ b/addons/base/models.py @@ -5,6 +5,7 @@ import markupsafe import requests from django.db import models +from django.utils import timezone from framework.auth import Auth from framework.auth.decorators import must_be_logged_in from framework.exceptions import HTTPError, PermissionsError @@ -14,6 +15,7 @@ from osf.models.node import AbstractNode from osf.models.user import OSFUser from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField +from osf.utils.fields import NonNaiveDateTimeField from website import settings from addons.base import logger, serializer from website.oauth.signals import oauth_complete @@ -38,7 +40,8 @@ class BaseAddonSettings(ObjectIDMixin, BaseModel): - deleted = models.BooleanField(default=False) + is_deleted = models.BooleanField(default=False) + deleted = NonNaiveDateTimeField(null=True, blank=True) class Meta: abstract = True @@ -52,13 +55,15 @@ def short_name(self): return self.config.short_name def delete(self, save=True): - self.deleted = True + self.is_deleted = True + self.deleted = timezone.now() self.on_delete() if save: self.save() def undelete(self, save=True): - self.deleted = False + self.is_deleted = False + self.deleted = None self.on_add() if save: self.save() @@ -215,7 +220,7 @@ def revoke_oauth_access(self, external_account, auth, save=True): """ for node in self.get_nodes_with_oauth_grants(external_account): try: - node.get_addon(external_account.provider, deleted=True).deauthorize(auth=auth) + node.get_addon(external_account.provider, is_deleted=True).deauthorize(auth=auth) except AttributeError: # No associated addon settings despite oauth grant pass diff --git a/addons/base/tests/models.py b/addons/base/tests/models.py index 3fcb3741d0a..3fa4fe6441a 100644 --- a/addons/base/tests/models.py +++ b/addons/base/tests/models.py @@ -2,6 +2,8 @@ import mock import pytest +import pytz +import datetime from addons.base.tests.utils import MockFolder from django.utils import timezone from framework.auth import Auth @@ -304,11 +306,14 @@ def test_delete(self): assert_true(self.node_settings.user_settings) assert_true(self.node_settings.folder_id) old_logs = list(self.node.logs.all()) - 
self.node_settings.delete() + mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc) + with mock.patch.object(timezone, 'now', return_value=mock_now): + self.node_settings.delete() self.node_settings.save() assert_is(self.node_settings.user_settings, None) assert_is(self.node_settings.folder_id, None) - assert_true(self.node_settings.deleted) + assert_true(self.node_settings.is_deleted) + assert_equal(self.node_settings.deleted, mock_now) assert_equal(list(self.node.logs.all()), list(old_logs)) def test_on_delete(self): diff --git a/addons/base/views.py b/addons/base/views.py index f268a932e04..12398e2e502 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -293,7 +293,9 @@ def get_auth(auth, **kwargs): raise HTTPError(http_status.HTTP_400_BAD_REQUEST) node = AbstractNode.load(node_id) or Preprint.load(node_id) - if not node: + if node and node.is_deleted: + raise HTTPError(http_status.HTTP_410_GONE) + elif not node: raise HTTPError(http_status.HTTP_404_NOT_FOUND) check_access(node, auth, action, cas_resp) @@ -603,11 +605,12 @@ def addon_deleted_file(auth, target, error_type='BLAME_PROVIDER', **kwargs): # Allow file_node to be passed in so other views can delegate to this one file_node = kwargs.get('file_node') or TrashedFileNode.load(kwargs.get('trashed_id')) - deleted_by, deleted_on = None, None + deleted_by, deleted_on, deleted = None, None, None if isinstance(file_node, TrashedFileNode): deleted_by = file_node.deleted_by deleted_by_guid = file_node.deleted_by._id if deleted_by else None deleted_on = file_node.deleted_on.strftime('%c') + ' UTC' + deleted = deleted_on if getattr(file_node, 'suspended', False): error_type = 'FILE_SUSPENDED' elif file_node.deleted_by is None or (auth.private_key and auth.private_link.anonymous): @@ -633,7 +636,8 @@ def addon_deleted_file(auth, target, error_type='BLAME_PROVIDER', **kwargs): file_name=markupsafe.escape(file_name), deleted_by=markupsafe.escape(getattr(deleted_by, 'fullname', None)), deleted_on=markupsafe.escape(deleted_on), - provider=markupsafe.escape(provider_full) + provider=markupsafe.escape(provider_full), + deleted=markupsafe.escape(deleted) ) if deleted_by: format_params['deleted_by_guid'] = markupsafe.escape(deleted_by_guid) diff --git a/addons/bitbucket/migrations/0003_rename_deleted_field.py b/addons/bitbucket/migrations/0003_rename_deleted_field.py new file mode 100644 index 00000000000..7d109973199 --- /dev/null +++ b/addons/bitbucket/migrations/0003_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_bitbucket', '0002_auto_20170808_1140'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/bitbucket/settings/__init__.py b/addons/bitbucket/settings/__init__.py index 228e7ddf51c..2b2f98881f6 100644 --- a/addons/bitbucket/settings/__init__.py +++ 
b/addons/bitbucket/settings/__init__.py @@ -6,5 +6,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/box/migrations/0004_rename_deleted_field.py b/addons/box/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..c4a358452d6 --- /dev/null +++ b/addons/box/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_box', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/box/settings/__init__.py b/addons/box/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/box/settings/__init__.py +++ b/addons/box/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/dataverse/migrations/0004_rename_deleted_field.py b/addons/dataverse/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..893865fad4d --- /dev/null +++ b/addons/dataverse/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_dataverse', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/dataverse/settings/__init__.py b/addons/dataverse/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/dataverse/settings/__init__.py +++ b/addons/dataverse/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/dataverse/tests/test_client.py b/addons/dataverse/tests/test_client.py index a156162e508..37835869435 100644 --- a/addons/dataverse/tests/test_client.py +++ b/addons/dataverse/tests/test_client.py @@ -180,7 +180,7 @@ def test_get_dataset_calls_patched_timeout_method(self, mock_requests): with assert_raises(Exception) as e: get_dataset(dataverse, 'My hdl') assert_is(mock_requests.get.assert_called_once_with('123', auth='me', timeout=settings.REQUEST_TIMEOUT), 
None) - assert_equal(e.exception.message, 'Done Testing') + assert_equal(str(e.exception), 'Done Testing') def test_get_deaccessioned_dataset(self): self.mock_dataset.get_state.return_value = 'DEACCESSIONED' diff --git a/addons/dropbox/migrations/0004_rename_deleted_field.py b/addons/dropbox/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..5a0c8aa1f71 --- /dev/null +++ b/addons/dropbox/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_dropbox', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/dropbox/settings/__init__.py b/addons/dropbox/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/dropbox/settings/__init__.py +++ b/addons/dropbox/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/figshare/migrations/0004_rename_deleted_field.py b/addons/figshare/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..e801ec2efd3 --- /dev/null +++ b/addons/figshare/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_figshare', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/figshare/settings/__init__.py b/addons/figshare/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/figshare/settings/__init__.py +++ b/addons/figshare/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/forward/migrations/0004_rename_deleted_field.py b/addons/forward/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..5156df292bf --- /dev/null +++ b/addons/forward/migrations/0004_rename_deleted_field.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import 
osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_forward', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/forward/settings/__init__.py b/addons/forward/settings/__init__.py index 228e7ddf51c..2b2f98881f6 100644 --- a/addons/forward/settings/__init__.py +++ b/addons/forward/settings/__init__.py @@ -6,5 +6,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/github/migrations/0004_rename_deleted_field.py b/addons/github/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..21852a0dbdd --- /dev/null +++ b/addons/github/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_github', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/github/settings/__init__.py b/addons/github/settings/__init__.py index 228e7ddf51c..2b2f98881f6 100644 --- a/addons/github/settings/__init__.py +++ b/addons/github/settings/__init__.py @@ -6,5 +6,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/github/utils.py b/addons/github/utils.py index c09ede234f5..1c12335b90e 100644 --- a/addons/github/utils.py +++ b/addons/github/utils.py @@ -35,8 +35,8 @@ def verify_hook_signature(node_settings, data, headers): if node_settings.hook_secret is None: raise HookError('No secret key') digest = hmac.new( - str(node_settings.hook_secret), - data, + node_settings.hook_secret.encode('utf-8'), + data.encode('utf-8'), digestmod=hashlib.sha1 ).hexdigest() signature = headers.get(HOOK_SIGNATURE_KEY, '').replace('sha1=', '') diff --git a/addons/gitlab/migrations/0004_auto_20190627_2029.py b/addons/gitlab/migrations/0004_auto_20190627_2029.py new file mode 100644 index 00000000000..b8294b8472b --- /dev/null +++ b/addons/gitlab/migrations/0004_auto_20190627_2029.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_gitlab', '0003_add_schemes_to_schemeless_hosts'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + 
migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/gitlab/settings/__init__.py b/addons/gitlab/settings/__init__.py index 228e7ddf51c..2b2f98881f6 100644 --- a/addons/gitlab/settings/__init__.py +++ b/addons/gitlab/settings/__init__.py @@ -6,5 +6,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/gitlab/tests/test_views.py b/addons/gitlab/tests/test_views.py index 04ae8f73e64..b601c317eca 100644 --- a/addons/gitlab/tests/test_views.py +++ b/addons/gitlab/tests/test_views.py @@ -195,14 +195,25 @@ def test_get_refs_sha_no_branch(self): # Tests for _check_permissions # make a user with no authorization; make sure check_permissions returns false - def test_permissions_no_auth(self): + @mock.patch('addons.gitlab.api.GitLabClient.repo') + def test_permissions_no_auth(self, mock_repo): gitlab_mock = self.gitlab # project is set to private right now + mock_repository = mock.Mock(**{ + 'user': 'fred', + 'repo': 'mock-repo', + 'permissions': { + 'project_access': {'access_level': 20, 'notification_level': 3} + }, + }) + mock_repo.attributes.return_value = mock_repository + + connection = gitlab_mock non_authenticated_user = UserFactory() non_authenticated_auth = Auth(user=non_authenticated_user) branch = 'master' - assert_false(check_permissions(self.node_settings, non_authenticated_auth, connection, branch)) + assert_false(check_permissions(self.node_settings, non_authenticated_auth, connection, branch, repo=mock_repository)) # make a repository that doesn't allow push access for this user; # make sure check_permissions returns false @@ -233,19 +244,36 @@ def test_permissions_not_head(self, mock_repo, mock_has_auth): mock_branch = mock.Mock(**{ 'commit': {'id': '67890'} }) + mock_repository = mock.Mock(**{ + 'user': 'fred', + 'repo': 'mock-repo', + 'permissions': { + 'project_access': {'access_level': 20, 'notification_level': 3} + }, + }) + mock_repo.attributes.return_value = mock_repository connection.branches.return_value = mock_branch sha = '12345' - assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, mock_branch, sha=sha)) + assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, mock_branch, sha=sha, repo=mock_repository)) # make sure permissions are not granted for editing a registration @mock.patch('addons.gitlab.models.UserSettings.has_auth') - def test_permissions(self, mock_has_auth): + @mock.patch('addons.gitlab.api.GitLabClient.repo') + def test_permissions(self, mock_repo, mock_has_auth): gitlab_mock = self.gitlab mock_has_auth.return_value = True connection = gitlab_mock + mock_repository = mock.Mock(**{ + 'user': 'fred', + 'repo': 'mock-repo', + 'permissions': { + 'project_access': {'access_level': 20, 'notification_level': 3} + }, + }) + mock_repo.attributes.return_value = mock_repository with mock.patch('osf.models.node.AbstractNode.is_registration', new_callable=mock.PropertyMock) as mock_is_reg: mock_is_reg.return_value = True - assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, 'master')) + assert_false(check_permissions(self.node_settings, self.consolidated_auth, connection, 'master', repo=mock_repository)) def 
check_hook_urls(self, urls, node, path, sha): url = node.web_url_for('addon_view_or_download_file', path=path, provider='gitlab') diff --git a/addons/gitlab/utils.py b/addons/gitlab/utils.py index e5b398b7e47..41b3697b243 100644 --- a/addons/gitlab/utils.py +++ b/addons/gitlab/utils.py @@ -34,8 +34,8 @@ def verify_hook_signature(node_settings, data, headers): if node_settings.hook_secret is None: raise HookError('No secret key') digest = hmac.new( - str(node_settings.hook_secret), - data, + node_settings.hook_secret.encode('utf-8'), + data.encode('utf-8'), digestmod=hashlib.sha1 ).hexdigest() signature = headers.get(HOOK_SIGNATURE_KEY, '').replace('sha1=', '') diff --git a/addons/googledrive/migrations/0004_rename_deleted_field.py b/addons/googledrive/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..7b20e7504c7 --- /dev/null +++ b/addons/googledrive/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_googledrive', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/googledrive/settings/__init__.py b/addons/googledrive/settings/__init__.py index d6b6c85b479..4d3fcfa3d4f 100644 --- a/addons/googledrive/settings/__init__.py +++ b/addons/googledrive/settings/__init__.py @@ -4,5 +4,5 @@ logger = logging.getLogger(__name__) try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/mendeley/migrations/0004_rename_deleted_field.py b/addons/mendeley/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..03811701516 --- /dev/null +++ b/addons/mendeley/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_mendeley', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/mendeley/settings/__init__.py b/addons/mendeley/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/mendeley/settings/__init__.py +++ b/addons/mendeley/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa 
-except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/onedrive/migrations/0003_rename_deleted_field.py b/addons/onedrive/migrations/0003_rename_deleted_field.py new file mode 100644 index 00000000000..c95df4ccfa1 --- /dev/null +++ b/addons/onedrive/migrations/0003_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_onedrive', '0002_auto_20171121_1426'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/onedrive/models.py b/addons/onedrive/models.py index ce256837bb7..152d6c77553 100644 --- a/addons/onedrive/models.py +++ b/addons/onedrive/models.py @@ -122,9 +122,7 @@ def folder_name(self): return None if self.folder_id != DEFAULT_ROOT_ID: - # `urllib` does not properly handle unicode. - # encode input to `str`, decode output back to `unicode` - return unquote(os.path.split(self.folder_path)[1].encode('utf-8')).decode('utf-8') + return unquote(os.path.split(self.folder_path)[1]) else: return '/ (Full OneDrive)' diff --git a/addons/onedrive/settings/__init__.py b/addons/onedrive/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/onedrive/settings/__init__.py +++ b/addons/onedrive/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/osfstorage/migrations/0006_rename_deleted_field.py b/addons/osfstorage/migrations/0006_rename_deleted_field.py new file mode 100644 index 00000000000..814e92eb2da --- /dev/null +++ b/addons/osfstorage/migrations/0006_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_osfstorage', '0005_region_mfr_url'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/osfstorage/settings/__init__.py b/addons/osfstorage/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/osfstorage/settings/__init__.py +++ b/addons/osfstorage/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings 
file found') diff --git a/addons/osfstorage/tests/test_models.py b/addons/osfstorage/tests/test_models.py index 1fb0606ffaf..806ab64754e 100644 --- a/addons/osfstorage/tests/test_models.py +++ b/addons/osfstorage/tests/test_models.py @@ -10,6 +10,7 @@ from framework.auth import Auth from addons.osfstorage.models import OsfStorageFile, OsfStorageFileNode, OsfStorageFolder +from osf.models import BaseFileNode from osf.exceptions import ValidationError from osf.utils.permissions import WRITE, ADMIN from osf.utils.fields import EncryptedJSONField @@ -99,7 +100,7 @@ def test_serialize(self): u'kind': u'file', u'version': 1, u'downloads': 0, - u'size': 1234L, + u'size': 1234, u'modified': version.created.isoformat(), u'contentType': u'text/plain', u'checkout': None, @@ -120,7 +121,7 @@ def test_serialize(self): u'kind': u'file', u'version': 1, u'downloads': 0, - u'size': 1234L, + u'size': 1234, # modified date is the creation date of latest version # see https://github.com/CenterForOpenScience/osf.io/pull/7155 u'modified': version.created.isoformat(), @@ -223,6 +224,18 @@ def test_delete_folder(self): None ) + def test_delete_root_node(self): + root = self.node_settings.get_root() + folder = root.append_folder('Test') + file = folder.append_file('test_file') + + # If the top-level item is a root, it is not deleted + root.delete() + root.reload() + assert root.type == 'osf.osfstoragefolder' + assert BaseFileNode.objects.get(_id=folder._id).type == 'osf.trashedfolder' + assert BaseFileNode.objects.get(_id=file._id).type == 'osf.trashedfile' + def test_delete_file(self): child = self.node_settings.get_root().append_file('Test') field_names = [f.name for f in child._meta.get_fields() if not f.is_relation and f.name not in ['id', 'content_type_pk']] @@ -237,7 +250,7 @@ def test_delete_file(self): child_storage['materialized_path'] = child.materialized_path assert_equal(trashed.path, '/' + child._id) trashed_field_names = [f.name for f in child._meta.get_fields() if not f.is_relation and - f.name not in ['id', '_materialized_path', 'content_type_pk', '_path', 'deleted_on', 'deleted_by', 'type', 'modified']] + f.name not in ['id', '_materialized_path', 'content_type_pk', '_path', 'deleted', 'deleted_on', 'deleted_by', 'type', 'modified']] for f, value in child_data.items(): if f in trashed_field_names: assert_equal(getattr(trashed, f), value) diff --git a/addons/owncloud/migrations/0005_rename_deleted_field.py b/addons/owncloud/migrations/0005_rename_deleted_field.py new file mode 100644 index 00000000000..c77f3212e10 --- /dev/null +++ b/addons/owncloud/migrations/0005_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_owncloud', '0004_merge_20171203_1325'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/owncloud/settings/__init__.py 
b/addons/owncloud/settings/__init__.py index ca63de80fdf..7f595ceebd1 100644 --- a/addons/owncloud/settings/__init__.py +++ b/addons/owncloud/settings/__init__.py @@ -4,5 +4,5 @@ logger = logging.getLogger(__name__) try: from addons.owncloud.settings.local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/s3/migrations/0004_rename_deleted_field.py b/addons/s3/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..94011347ad3 --- /dev/null +++ b/addons/s3/migrations/0004_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_s3', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/s3/settings/__init__.py b/addons/s3/settings/__init__.py index 228e7ddf51c..2b2f98881f6 100644 --- a/addons/s3/settings/__init__.py +++ b/addons/s3/settings/__init__.py @@ -6,5 +6,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/s3/tests/test_view.py b/addons/s3/tests/test_view.py index 3057a0fa3aa..c6d6cc18191 100644 --- a/addons/s3/tests/test_view.py +++ b/addons/s3/tests/test_view.py @@ -47,6 +47,7 @@ def test_s3_settings_input_empty_keys(self): }, auth=self.user.auth, expect_errors=True) assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) assert_in('All the fields above are required.', rv.body) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) def test_s3_settings_input_empty_access_key(self): url = self.project.api_url_for('s3_add_user_account') @@ -56,6 +57,7 @@ def test_s3_settings_input_empty_access_key(self): }, auth=self.user.auth, expect_errors=True) assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) assert_in('All the fields above are required.', rv.body) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) def test_s3_settings_input_empty_secret_key(self): url = self.project.api_url_for('s3_add_user_account') @@ -65,6 +67,7 @@ def test_s3_settings_input_empty_secret_key(self): }, auth=self.user.auth, expect_errors=True) assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) assert_in('All the fields above are required.', rv.body) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) def test_s3_set_bucket_no_settings(self): user = AuthUserFactory() @@ -108,8 +111,9 @@ def test_user_settings_cant_list(self, mock_can_list): 'access_key': 'aldkjf', 'secret_key': 'las' }, auth=self.user.auth, expect_errors=True) - assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) + assert_in('Unable to list buckets.', rv.body) + assert_equals(rv.status_int, http_status.HTTP_400_BAD_REQUEST) def test_s3_remove_node_settings_owner(self): url = 
self.node_settings.owner.api_url_for('s3_deauthorize_node') diff --git a/addons/twofactor/migrations/0004_rename_deleted_field.py b/addons/twofactor/migrations/0004_rename_deleted_field.py new file mode 100644 index 00000000000..399c87961a1 --- /dev/null +++ b/addons/twofactor/migrations/0004_rename_deleted_field.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_twofactor', '0003_auto_20170713_1125'), + ] + + operations = [ + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/twofactor/tests/test_models.py b/addons/twofactor/tests/test_models.py index e501b5fdc57..244f3e2e3fb 100644 --- a/addons/twofactor/tests/test_models.py +++ b/addons/twofactor/tests/test_models.py @@ -1,5 +1,5 @@ import unittest -from future.moves.urllib.parse import urlparse, parse_qs +from future.moves.urllib.parse import urlparse, urljoin, parse_qs import pytest from addons.twofactor.tests.utils import _valid_code diff --git a/addons/wiki/migrations/0009_auto_20180302_1404.py b/addons/wiki/migrations/0009_auto_20180302_1404.py index fac3d9de6f9..79a133d362a 100644 --- a/addons/wiki/migrations/0009_auto_20180302_1404.py +++ b/addons/wiki/migrations/0009_auto_20180302_1404.py @@ -14,6 +14,6 @@ class Migration(migrations.Migration): operations = [ migrations.AddIndex( model_name='wikipage', - index=models.Index(fields=[b'page_name', b'node'], name='addons_wiki_page_na_6d5d96_idx'), + index=models.Index(fields=['page_name', 'node'], name='addons_wiki_page_na_6d5d96_idx'), ), ] diff --git a/addons/wiki/migrations/0012_rename_deleted_field.py b/addons/wiki/migrations/0012_rename_deleted_field.py new file mode 100644 index 00000000000..cf8b69560fb --- /dev/null +++ b/addons/wiki/migrations/0012_rename_deleted_field.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_wiki', '0011_auto_20180415_1649'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/wiki/settings/__init__.py b/addons/wiki/settings/__init__.py index 228e7ddf51c..2b2f98881f6 100644 --- a/addons/wiki/settings/__init__.py +++ b/addons/wiki/settings/__init__.py @@ -6,5 +6,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/wiki/tests/test_wiki.py b/addons/wiki/tests/test_wiki.py index de976becc25..d332d32445b 100644 --- a/addons/wiki/tests/test_wiki.py +++ b/addons/wiki/tests/test_wiki.py @@ -759,8 +759,8 @@ def test_uuid_generated_once(self): self.project.reload() private_uuid = self.project.wiki_private_uuids.get(self.wkey) assert_true(private_uuid) - assert_not_in(private_uuid, res.body) - assert_in(get_sharejs_uuid(self.project, self.wname), res.body) 
+ assert_not_in(private_uuid, res.body.decode()) + assert_in(get_sharejs_uuid(self.project, self.wname), res.body.decode()) # Revisit page; uuid has not changed res = self.app.get(url, auth=self.user.auth) @@ -779,13 +779,13 @@ def test_uuid_not_visible_without_write_permission(self): self.project.reload() private_uuid = self.project.wiki_private_uuids.get(self.wkey) assert_true(private_uuid) - assert_not_in(private_uuid, res.body) - assert_in(get_sharejs_uuid(self.project, self.wname), res.body) + assert_not_in(private_uuid, res.body.decode()) + assert_in(get_sharejs_uuid(self.project, self.wname), res.body.decode()) # Users without write permission should not be able to access res = self.app.get(url) assert_equal(res.status_code, 200) - assert_not_in(get_sharejs_uuid(self.project, self.wname), res.body) + assert_not_in(get_sharejs_uuid(self.project, self.wname), res.body.decode()) def test_uuid_not_generated_without_write_permission(self): WikiPage.objects.create_for_node(self.project, self.wname, 'some content', Auth(self.user)) diff --git a/addons/zotero/migrations/0006_rename_deleted_field.py b/addons/zotero/migrations/0006_rename_deleted_field.py new file mode 100644 index 00000000000..16927d94cdb --- /dev/null +++ b/addons/zotero/migrations/0006_rename_deleted_field.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('addons_zotero', '0005_zotero_personal_libraries_20180216_0849'), + ] + + operations = [ + migrations.RenameField( + model_name='nodesettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.RenameField( + model_name='usersettings', + new_name='is_deleted', + old_name='deleted', + ), + migrations.AddField( + model_name='nodesettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='usersettings', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/addons/zotero/settings/__init__.py b/addons/zotero/settings/__init__.py index c26f460775d..bce98689d3b 100644 --- a/addons/zotero/settings/__init__.py +++ b/addons/zotero/settings/__init__.py @@ -5,5 +5,5 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: logger.warn('No local.py settings file found') diff --git a/addons/zotero/tests/test_views.py b/addons/zotero/tests/test_views.py index 959f1ab54ac..d69d6fc8508 100644 --- a/addons/zotero/tests/test_views.py +++ b/addons/zotero/tests/test_views.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import mock import pytest -from future.moves.urllib.parse import urljoin +from future.moves.urllib.parse import urlparse, urljoin import responses from framework.auth import Auth diff --git a/admin/base/settings/__init__.py b/admin/base/settings/__init__.py index d1cbe42d497..e9269fbda58 100644 --- a/admin/base/settings/__init__.py +++ b/admin/base/settings/__init__.py @@ -12,6 +12,6 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: warnings.warn('No admin/base/settings/local.py settings file found. 
Did you remember to ' 'copy local-dist.py to local.py?', ImportWarning) diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 2561c6a5514..4d292168b80 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -157,12 +157,14 @@ def delete(self, request, *args, **kwargs): if node.is_deleted: node.is_deleted = False node.deleted_date = None + node.deleted = None flag = NODE_RESTORED message = 'Node {} restored.'.format(node.pk) osf_flag = NodeLog.NODE_CREATED elif not node.is_registration: node.is_deleted = True - node.deleted_date = timezone.now() + node.deleted = timezone.now() + node.deleted_date = node.deleted flag = NODE_REMOVED message = 'Node {} removed.'.format(node.pk) osf_flag = NodeLog.NODE_REMOVED diff --git a/admin/spam/views.py b/admin/spam/views.py index 4baa42a4c13..01772432adf 100644 --- a/admin/spam/views.py +++ b/admin/spam/views.py @@ -3,6 +3,7 @@ from django.views.generic import FormView, ListView, DetailView from django.contrib.auth.mixins import PermissionRequiredMixin from django.http import Http404 +from django.utils import timezone from osf.models.comment import Comment from osf.models.user import OSFUser @@ -112,11 +113,13 @@ def form_valid(self, form): if int(form.cleaned_data.get('confirm')) == SpamStatus.SPAM: item.confirm_spam() item.is_deleted = True + item.deleted = timezone.now() log_message = 'Confirmed SPAM: {}'.format(spam_id) log_action = CONFIRM_SPAM else: item.confirm_ham() item.is_deleted = False + item.deleted = None log_message = 'Confirmed HAM: {}'.format(spam_id) log_action = CONFIRM_HAM item.save() diff --git a/admin_tests/base/test_utils.py b/admin_tests/base/test_utils.py index 3506c57534c..8053fe7fabb 100644 --- a/admin_tests/base/test_utils.py +++ b/admin_tests/base/test_utils.py @@ -53,7 +53,7 @@ def test_just_toplevel_subject(self): subjects_selected = [self.parent_one] rules_returned = get_subject_rules(subjects_selected) rules_ideal = [[[self.parent_one._id], False]] - self.assertItemsEqual(rules_returned, rules_ideal) + assert rules_returned == rules_ideal def test_two_toplevel_subjects(self): subjects_selected = [ @@ -65,7 +65,7 @@ def test_two_toplevel_subjects(self): [[self.parent_one._id], False], [[self.parent_two._id], False] ] - self.assertItemsEqual(rules_returned, rules_ideal) + assert rules_returned == rules_ideal def test_one_child(self): subjects_selected = [ @@ -74,7 +74,7 @@ def test_one_child(self): ] rules_returned = get_subject_rules(subjects_selected) rules_ideal = [[[self.parent_one._id, self.child_one_1._id], False]] - self.assertItemsEqual(rules_returned, rules_ideal) + assert rules_returned == rules_ideal def test_one_child_all_grandchildren(self): subjects_selected = [ @@ -85,7 +85,7 @@ def test_one_child_all_grandchildren(self): ] rules_returned = get_subject_rules(subjects_selected) rules_ideal = [[[self.parent_one._id, self.child_one_1._id], True]] - self.assertItemsEqual(rules_returned, rules_ideal) + assert rules_returned == rules_ideal def test_all_children_all_grandchildren(self): subjects_selected = [ @@ -97,7 +97,7 @@ def test_all_children_all_grandchildren(self): ] rules_returned = get_subject_rules(subjects_selected) rules_ideal = [[[self.parent_one._id], True]] - self.assertItemsEqual(rules_returned, rules_ideal) + assert rules_returned == rules_ideal def test_one_child_with_one_grandchild(self): subjects_selected = [ @@ -109,7 +109,7 @@ def test_one_child_with_one_grandchild(self): rules_ideal = [ [[self.parent_one._id, self.child_one_1._id, self.grandchild_one_1._id], False] ] - 
self.assertItemsEqual(rules_returned, rules_ideal) + assert rules_returned == rules_ideal def test_rules_to_subjects(self): rules = [ @@ -117,8 +117,7 @@ def test_rules_to_subjects(self): ] subject_queryset_ideal = Subject.objects.filter(Q(id=self.parent_one.id) | Q(id=self.child_one_1.id)) returned_subjects = rules_to_subjects(rules) - - self.assertItemsEqual(subject_queryset_ideal, returned_subjects) + assert list(subject_queryset_ideal) == list(returned_subjects) class TestNodeChanges(AdminTestCase): def setUp(self): diff --git a/admin_tests/institutions/test_views.py b/admin_tests/institutions/test_views.py index 02fa662cf8c..98832e6c7e3 100644 --- a/admin_tests/institutions/test_views.py +++ b/admin_tests/institutions/test_views.py @@ -40,7 +40,7 @@ def test_get_list(self, *args, **kwargs): def test_get_queryset(self): institutions_returned = list(self.view.get_queryset()) inst_list = [self.institution1, self.institution2] - nt.assert_items_equal(institutions_returned, inst_list) + nt.assert_equals(set(institutions_returned), set(inst_list)) nt.assert_is_instance(institutions_returned[0], Institution) def test_context_data(self): @@ -246,5 +246,5 @@ def test_get_view(self): def test_get_queryset(self): nodes_returned = list(self.view.get_queryset()) node_list = [self.node1, self.node2] - nt.assert_items_equal(nodes_returned, node_list) + nt.assert_equals(nodes_returned, node_list) nt.assert_is_instance(nodes_returned[0], Node) diff --git a/admin_tests/nodes/test_views.py b/admin_tests/nodes/test_views.py index eacf8077453..b9dc77ff1ef 100644 --- a/admin_tests/nodes/test_views.py +++ b/admin_tests/nodes/test_views.py @@ -1,6 +1,8 @@ import datetime as dt import pytest import mock +import pytz +import datetime from osf.models import AdminLogEntry, OSFUser, Node, NodeLog from admin.nodes.views import ( @@ -135,19 +137,24 @@ def test_get_context(self): def test_remove_node(self): count = AdminLogEntry.objects.count() - self.view.delete(self.request) + mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc) + with mock.patch.object(timezone, 'now', return_value=mock_now): + self.view.delete(self.request) self.node.refresh_from_db() nt.assert_true(self.node.is_deleted) nt.assert_equal(AdminLogEntry.objects.count(), count + 1) + nt.assert_equal(self.node.deleted, mock_now) def test_restore_node(self): self.view.delete(self.request) self.node.refresh_from_db() nt.assert_true(self.node.is_deleted) + nt.assert_true(self.node.deleted is not None) count = AdminLogEntry.objects.count() self.view.delete(self.request) self.node.reload() nt.assert_false(self.node.is_deleted) + nt.assert_true(self.node.deleted is None) nt.assert_equal(AdminLogEntry.objects.count(), count + 1) def test_no_user_permissions_raises_error(self): @@ -486,6 +493,7 @@ def test_remove_stuck_registration(self): self.registration.refresh_from_db() nt.assert_true(self.registration.is_deleted) + nt.assert_true(self.registration.deleted is not None) def test_remove_stuck_registration_with_an_addon(self): # Prevents circular import that prevents admin app from starting up @@ -499,3 +507,4 @@ def test_remove_stuck_registration_with_an_addon(self): view.post(self.request) self.registration.refresh_from_db() nt.assert_true(self.registration.is_deleted) + nt.assert_true(self.registration.deleted is not None) diff --git a/admin_tests/users/test_views.py b/admin_tests/users/test_views.py index 703c3e1969c..1d22abbec35 100644 --- a/admin_tests/users/test_views.py +++ b/admin_tests/users/test_views.py @@ -726,7 +726,7 @@ def 
test_get_user_confirmation_link(self): view = views.GetUserConfirmationLink() view = setup_view(view, request, guid=user._id) - user_token = user.email_verifications.keys()[0] + user_token = list(user.email_verifications.keys())[0] ideal_link_path = '/confirm/{}/{}/'.format(user._id, user_token) link = view.get_link(user) link_path = str(furl.furl(link).path) @@ -739,12 +739,12 @@ def test_get_user_confirmation_link_with_expired_token(self): view = views.GetUserConfirmationLink() view = setup_view(view, request, guid=user._id) - old_user_token = user.email_verifications.keys()[0] + old_user_token = list(user.email_verifications.keys())[0] user.email_verifications[old_user_token]['expiration'] = datetime.utcnow().replace(tzinfo=pytz.utc) - timedelta(hours=24) user.save() link = view.get_link(user) - new_user_token = user.email_verifications.keys()[0] + new_user_token = list(user.email_verifications.keys())[0] link_path = str(furl.furl(link).path) ideal_link_path = '/confirm/{}/{}/'.format(user._id, new_user_token) diff --git a/api/base/exceptions.py b/api/base/exceptions.py index 8c73f23ae2e..dcbdaf0699f 100644 --- a/api/base/exceptions.py +++ b/api/base/exceptions.py @@ -1,8 +1,9 @@ +from past.builtins import basestring from rest_framework import status as http_status from django.utils.translation import ugettext_lazy as _ from rest_framework import status -from rest_framework.exceptions import APIException, AuthenticationFailed +from rest_framework.exceptions import APIException, AuthenticationFailed, ErrorDetail def get_resource_object_member(error_key, context): @@ -33,7 +34,7 @@ def dict_error_formatting(errors, context, index=None): index = str(index) + '/' for error_key, error_description in errors.items(): - if isinstance(error_description, basestring): + if isinstance(error_description, ErrorDetail): error_description = [error_description] if error_key in top_level_error_keys: diff --git a/api/base/filters.py b/api/base/filters.py index d15eadd5f6f..4a026400602 100644 --- a/api/base/filters.py +++ b/api/base/filters.py @@ -20,7 +20,7 @@ from rest_framework.filters import OrderingFilter from osf.models import Subject, Preprint from osf.models.base import GuidMixin - +from functools import cmp_to_key def lowercase(lower): if hasattr(lower, '__call__'): @@ -61,7 +61,7 @@ def filter_queryset(self, request, queryset, view): return super(OSFOrderingFilter, self).filter_queryset(request, queryset, view) if ordering: if isinstance(ordering, (list, tuple)): - sorted_list = sorted(queryset, cmp=sort_multiple(ordering)) + sorted_list = sorted(queryset, key=cmp_to_key(sort_multiple(ordering))) return sorted_list return queryset.sort(*ordering) return queryset diff --git a/api/base/metrics.py b/api/base/metrics.py index c86428dbf33..29526b9c5e4 100644 --- a/api/base/metrics.py +++ b/api/base/metrics.py @@ -72,7 +72,7 @@ def parse_metric_query_params(self, query_params): } """ query = {} - for key, value in query_params.iteritems(): + for key, value in query_params.items(): match = self.METRICS_QUERY_PATTERN.match(key) if match: match_dict = match.groupdict() diff --git a/api/base/parsers.py b/api/base/parsers.py index 9928b95ccdc..6e51844af91 100644 --- a/api/base/parsers.py +++ b/api/base/parsers.py @@ -45,7 +45,7 @@ def flatten_relationships(self, relationships): raise ParseError() # Can only create one type of relationship. 
- related_resource = relationships.keys()[0] + related_resource = list(relationships.keys())[0] if not isinstance(relationships[related_resource], dict) or related_resource == 'data': raise ParseError() data = relationships[related_resource].get('data') @@ -77,7 +77,7 @@ def flatten_data(self, resource_object, parser_context, is_list): object_type = resource_object.get('type') type_required = not ( - legacy_type_allowed and parser_context['request'].version < 2.7 and request_method == 'PATCH' + legacy_type_allowed and float(parser_context['request'].version) < 2.7 and request_method == 'PATCH' ) # For validating type and id for bulk delete: @@ -210,7 +210,7 @@ def parse(self, stream, media_type=None, parser_context=None): legacy_type_allowed = parser_context.get('legacy_type_allowed', True) type_required = not ( legacy_type_allowed and - parser_context['request'].version < 2.7 and + float(parser_context['request'].version) < 2.7 and parser_context['request'].method == 'PATCH' ) if data: diff --git a/api/base/renderers.py b/api/base/renderers.py index 86206f40111..132009f9675 100644 --- a/api/base/renderers.py +++ b/api/base/renderers.py @@ -7,13 +7,10 @@ class JSONRendererWithESISupport(JSONRenderer): media_type = 'application/json' def render(self, data, accepted_media_type=None, renderer_context=None): - # TODO: There should be a way to do this that is conditional on esi being requested and - # TODO: In such a way that it doesn't use regex unless there's absolutely no other way. initial_rendering = super(JSONRendererWithESISupport, self).render(data, accepted_media_type, renderer_context) - augmented_rendering = re.sub(r'""', r'', initial_rendering) + augmented_rendering = re.sub(r'""', b'', initial_rendering) return augmented_rendering - class JSONAPIRenderer(JSONRendererWithESISupport): format = 'jsonapi' media_type = 'application/vnd.api+json' diff --git a/api/base/serializers.py b/api/base/serializers.py index 5542a63cb70..df1966f366d 100644 --- a/api/base/serializers.py +++ b/api/base/serializers.py @@ -335,7 +335,7 @@ def _url_val(val, obj, serializer, request, **kwargs): url = None if isinstance(val, Link): # If a Link is passed, get the url value url = val.resolve_url(obj, request) - elif isinstance(val, basestring): # if a string is passed, it's a method of the serializer + elif isinstance(val, str): # if a string is passed, it's a method of the serializer if getattr(serializer, 'field', None): serializer = serializer.parent url = getattr(serializer, val)(obj) if obj is not None else None diff --git a/api/base/settings/__init__.py b/api/base/settings/__init__.py index 0758997248b..e6d55dd740a 100644 --- a/api/base/settings/__init__.py +++ b/api/base/settings/__init__.py @@ -15,7 +15,7 @@ try: from .local import * # noqa -except ImportError as error: +except ImportError: warnings.warn( 'No api/base/settings/local.py settings file found. 
Did you remember to ' 'copy local-dist.py to local.py?', ImportWarning, diff --git a/api/base/utils.py b/api/base/utils.py index 592de335b54..deaf0dc5997 100644 --- a/api/base/utils.py +++ b/api/base/utils.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from past.builtins import basestring import furl from future.moves.urllib.parse import urlunsplit, urlsplit, parse_qs, urlencode from distutils.version import StrictVersion diff --git a/api/base/versioning.py b/api/base/versioning.py index ca4a5a62dbf..7cbdd036c37 100644 --- a/api/base/versioning.py +++ b/api/base/versioning.py @@ -61,9 +61,9 @@ def get_header_version(self, request, major_version): version = media_type.params.get(self.version_param) if not version: return None + version = unicode_http_header(version) if version == 'latest': return get_latest_sub_version(major_version) - version = unicode_http_header(version) if not self.is_allowed_version(version): raise drf_exceptions.NotAcceptable(invalid_version_message) return version diff --git a/api/base/views.py b/api/base/views.py index bb3eb48f24e..4f41a3ef7ea 100644 --- a/api/base/views.py +++ b/api/base/views.py @@ -1,3 +1,5 @@ +from builtins import str + from collections import defaultdict from distutils.version import StrictVersion @@ -152,7 +154,7 @@ def get_serializer_context(self): if 'fields[{}]'.format(serializer_class_type) in self.request.query_params: # Check only requested and mandatory fields sparse_fields = self.request.query_params['fields[{}]'.format(serializer_class_type)] - for field in fields_check.copy().keys(): + for field in list(fields_check.copy().keys()): if field not in ('type', 'id', 'links') and field not in sparse_fields: fields_check.pop(field) @@ -162,7 +164,7 @@ def get_serializer_context(self): for field in fields_check: if getattr(fields_check[field], 'always_embed', False) and field not in embeds: - embeds.append(unicode(field)) + embeds.append(str(field)) if getattr(fields_check[field], 'never_embed', False) and field in embeds: embeds.remove(field) embeds_partials = {} diff --git a/api/caching/tests/test_caching.py b/api/caching/tests/test_caching.py index ccead430a9b..22380b220ca 100644 --- a/api/caching/tests/test_caching.py +++ b/api/caching/tests/test_caching.py @@ -1,7 +1,6 @@ from __future__ import unicode_literals import copy -import json import random import unittest import uuid @@ -171,7 +170,6 @@ def validate_keys(self, data, embed_keys): embed_keys = list() if 'errors' in data.keys(): - print json.dumps(data, indent=4) return for item in data['data']: # all these should be lists. if 'embeds' in item.keys(): diff --git a/api/chronos/README.md b/api/chronos/README.md new file mode 100644 index 00000000000..b8be14a3b33 --- /dev/null +++ b/api/chronos/README.md @@ -0,0 +1,20 @@ +## Setting up Chronos locally + +In order to access Chronos's staging server to test Chronos locally one must do a number of things. + +1. Enter or sync the osfio_preprints_1 container and set config/environment.js to include the desired preprint provider in the `chronosProviders` +list. + +2. Make sure that your desired journal has an id listed in config/environment.js's approvedChronosJournalIds, or create a new journal and add that to the list. + +3. 
Go to website/settings/local.py and add the following: + +VERIFY_CHRONOS_SSL_CERT = False +CHRONOS_USE_FAKE_FILE = True +CHRONOS_FAKE_FILE_URL = +CHRONOS_HOST = 'https://staging-api.chronos-oa.com' +CHRONOS_USERNAME = +CHRONOS_PASSWORD = +CHRONOS_API_KEY = + +The link 'Submit to an APA-published journal' should appear when looking at an accepted preprint in the provider you've added to config/environment.js. diff --git a/api/citations/utils.py index c4b781c6cd0..05d8d967408 100644 --- a/api/citations/utils.py +++ b/api/citations/utils.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + import os import re from rest_framework import status as http_status diff --git a/api/files/views.py index f0a33b32450..842ee30c11e 100644 --- a/api/files/views.py +++ b/api/files/views.py @@ -1,5 +1,6 @@ +import io + from django.http import FileResponse -from django.core.files.base import ContentFile from rest_framework import generics from rest_framework import permissions as drf_permissions @@ -246,7 +247,8 @@ def get(self, request, **kwargs): file_type = self.request.query_params.get('export', 'json') record = self.get_object() try: - response = FileResponse(ContentFile(record.serialize(format=file_type))) + content = io.BytesIO(record.serialize(format=file_type).encode()) + response = FileResponse(content) except ValueError as e: detail = str(e).replace('.', '') raise ValidationError(detail='{} for metadata file export.'.format(detail)) diff --git a/api/institutions/authentication.py index 02fa01757df..0dc78e105eb 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -61,7 +61,7 @@ def authenticate(self, request): options={'verify_exp': False}, algorithm='HS256', ) - except (jwt.InvalidTokenError, TypeError): + except (jwt.InvalidTokenError, TypeError, jwe.exceptions.MalformedData): raise AuthenticationFailed data = json.loads(payload['data']) diff --git a/api/nodes/serializers.py index 77eb201d444..dceab441f65 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -301,7 +301,7 @@ class NodeSerializer(TaxonomizableSerializerMixin, JSONAPISerializer): id = IDField(source='_id', read_only=True) type = TypeField() - category_choices = settings.NODE_CATEGORY_MAP.items() + category_choices = list(settings.NODE_CATEGORY_MAP.items()) category_choices_string = ', '.join(["'{}'".format(choice[0]) for choice in category_choices]) title = ser.CharField(required=True) @@ -1091,7 +1091,7 @@ class NodeDetailSerializer(NodeSerializer): class NodeForksSerializer(NodeSerializer): - category_choices = settings.NODE_CATEGORY_MAP.items() + category_choices = list(settings.NODE_CATEGORY_MAP.items()) category_choices_string = ', '.join(["'{}'".format(choice[0]) for choice in category_choices]) title = ser.CharField(required=False) @@ -1230,7 +1230,7 @@ def validate_data(self, node, user_id=None, full_name=None, email=None, index=No raise exceptions.ValidationError(detail='A user ID or full name must be provided to add a contributor.') if user_id and email: raise exceptions.ValidationError(detail='Do not provide an email when providing this user_id.') - if index > len(node.contributors): + if index is not None and index > len(node.contributors): raise exceptions.ValidationError(detail='{} is not a valid contributor index for node with id {}'.format(index, node._id)) def create(self, validated_data):
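One detail worth spelling out in the `validate_data` hunk above: under Python 2, comparing `None` with an `int` silently returned a boolean, so `index > len(node.contributors)` merely misbehaved when no index was supplied; under Python 3 the same comparison raises `TypeError`. A standalone sketch of what the added `index is not None` guard protects against (names are illustrative; no OSF models involved):

```python
contributors = ['alice', 'bob']  # stand-in for node.contributors
index = None                     # caller supplied no index

# Python 2: None > 2 evaluated to False, so the bare comparison was harmless.
# Python 3: TypeError: '>' not supported between instances of 'NoneType' and 'int'
if index is not None and index > len(contributors):
    raise ValueError('{} is not a valid contributor index'.format(index))
```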
diff --git a/api/nodes/utils.py index 6e97eb7a37d..d11349229ec 100644 --- a/api/nodes/utils.py +++ b/api/nodes/utils.py @@ -88,7 +88,7 @@ def optimize_node_queryset(self, queryset): abstract_node_contenttype_id = ContentType.objects.get_for_model(AbstractNode).id guid = Guid.objects.filter(content_type_id=abstract_node_contenttype_id, object_id=OuterRef('parent_id')) parent = NodeRelation.objects.annotate(parent__id=Subquery(guid.values('_id')[:1])).filter(child=OuterRef('pk'), is_node_link=False) - wiki_addon = WikiNodeSettings.objects.filter(owner=OuterRef('pk'), deleted=False) + wiki_addon = WikiNodeSettings.objects.filter(owner=OuterRef('pk'), is_deleted=False) preprints = Preprint.objects.can_view(user=auth.user).filter(node_id=OuterRef('pk')) region = Region.objects.filter(id=OuterRef('region_id')) node_settings = NodeSettings.objects.annotate(region_abbrev=Subquery(region.values('_id')[:1])).filter(owner_id=OuterRef('pk')) diff --git a/api/nodes/views.py index 192993a5240..0d2ea492a8c 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -2047,6 +2047,7 @@ def get_object(self): def perform_destroy(self, link): assert isinstance(link, PrivateLink), 'link must be a PrivateLink' link.is_deleted = True + link.deleted = timezone.now() link.save() # FIXME: Doesn't work because instance isn't JSON-serializable # enqueue_postcommit_task(ban_url, (self.get_node(),), {}, celery=False, once_per_request=True) diff --git a/api/preprints/serializers.py index 43bf0b4f196..b6dca5f398c 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -311,7 +311,7 @@ def set_field(self, func, val, auth, save=False): except PermissionsError as e: raise exceptions.PermissionDenied(detail=str(e)) except (ValueError, ValidationError, NodeStateError) as e: - raise exceptions.ValidationError(detail=e.message) + raise exceptions.ValidationError(detail=str(e)) class PreprintCreateSerializer(PreprintSerializer): diff --git a/api/regions/views.py index f91929ee372..98d8d0a06a8 100644 --- a/api/regions/views.py +++ b/api/regions/views.py @@ -23,7 +23,7 @@ class RegionMixin(object): def get_region(self): region_id = self.kwargs[self.region_lookup_url_kwarg] if self.kwargs.get('is_embedded') is True: - node_id, node = self.request.parents[Node].items()[0] + node_id, node = list(self.request.parents[Node].items())[0] try: # use the annotated value if possible region_id = node.region diff --git a/api/search/views.py index 629152251ba..5c07a2780cc 100644 --- a/api/search/views.py +++ b/api/search/views.py @@ -66,7 +66,7 @@ def get_queryset(self, query=None): try: results = search.search(query, doc_type=self.doc_type, raw=True) except MalformedQueryError as e: - raise ValidationError(e.message) + raise ValidationError(e) return results diff --git a/api/users/views.py index 503844f3be9..61a27c43de8 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -103,7 +103,7 @@ def get_user(self, check_permissions=True): # of the query cache if hasattr(self.request, 'parents') and len(self.request.parents.get(Contributor, {})) == 1: # We expect one parent contributor view, so index into the first item - contrib_id, contrib = self.request.parents[Contributor].items()[0] + contrib_id, contrib = list(self.request.parents[Contributor].items())[0] user = contrib.user if user.is_disabled: raise UserGone(user=user) @@ -608,7 +608,7 @@ def get_object(self): except KeyError: raise NotFound('Requested external identity could not be
found.') - return {'_id': identity_id, 'external_id': identity.keys()[0], 'status': identity.values()[0]} + return {'_id': identity_id, 'external_id': list(identity.keys())[0], 'status': list(identity.values())[0]} def perform_destroy(self, instance): user = self.get_user() @@ -838,7 +838,7 @@ def get_default_queryset(self): serialized_email = UserEmail(email_id=hashed_id, address=email.address, confirmed=True, verified=True, primary=primary) serialized_emails.append(serialized_email) email_verifications = user.email_verifications or {} - for token, detail in email_verifications.iteritems(): + for token, detail in email_verifications.items(): is_merge = Email.objects.filter(address=detail['email']).exists() serialized_unconfirmed_email = UserEmail( email_id=token, diff --git a/api_tests/base/test_filters.py b/api_tests/base/test_filters.py index 754852c7606..32f6fb609a8 100644 --- a/api_tests/base/test_filters.py +++ b/api_tests/base/test_filters.py @@ -30,6 +30,7 @@ from api.base.settings.defaults import API_BASE from api.base.serializers import RelationshipField +from functools import cmp_to_key class FakeSerializer(ser.Serializer): @@ -426,7 +427,7 @@ def test_filter_queryset_forward(self): self.query(x) for x in 'NewProj Zip Proj Activity'.split()] sorted_query = sorted( query_to_be_sorted, - cmp=filters.sort_multiple(['title']) + key=cmp_to_key(filters.sort_multiple(['title'])) ) sorted_output = [str(i) for i in sorted_query] assert_equal(sorted_output, ['Activity', 'NewProj', 'Proj', 'Zip']) @@ -436,7 +437,7 @@ def test_filter_queryset_forward_duplicate(self): self.query(x) for x in 'NewProj Activity Zip Activity'.split()] sorted_query = sorted( query_to_be_sorted, - cmp=filters.sort_multiple(['title']) + key=cmp_to_key(filters.sort_multiple(['title'])) ) sorted_output = [str(i) for i in sorted_query] assert_equal(sorted_output, ['Activity', 'Activity', 'NewProj', 'Zip']) @@ -446,7 +447,7 @@ def test_filter_queryset_reverse(self): self.query(x) for x in 'NewProj Zip Proj Activity'.split()] sorted_query = sorted( query_to_be_sorted, - cmp=filters.sort_multiple(['-title']) + key=cmp_to_key(filters.sort_multiple(['-title'])) ) sorted_output = [str(i) for i in sorted_query] assert_equal(sorted_output, ['Zip', 'Proj', 'NewProj', 'Activity']) @@ -456,7 +457,7 @@ def test_filter_queryset_reverse_duplicate(self): self.query(x) for x in 'NewProj Activity Zip Activity'.split()] sorted_query = sorted( query_to_be_sorted, - cmp=filters.sort_multiple(['-title']) + key=cmp_to_key(filters.sort_multiple(['-title'])) ) sorted_output = [str(i) for i in sorted_query] assert_equal(sorted_output, ['Zip', 'NewProj', 'Activity', 'Activity']) @@ -468,7 +469,7 @@ def test_filter_queryset_handles_multiple_fields(self): self.query_with_num(title='Activity', number=40)] actual = [ x.number for x in sorted( - objs, cmp=filters.sort_multiple(['title', '-number']) + objs, key=cmp_to_key(filters.sort_multiple(['title', '-number'])) )] assert_equal(actual, [40, 30, 10, 20]) diff --git a/api_tests/base/test_serializers.py b/api_tests/base/test_serializers.py index 38d5017c393..645c7154bf3 100644 --- a/api_tests/base/test_serializers.py +++ b/api_tests/base/test_serializers.py @@ -281,11 +281,11 @@ def test_counts_not_included_in_link_fields_by_default(self): continue if isinstance(relation, list): for item in relation: - link = item['links'].values()[0] + link = list(item['links'].values())[0] link_meta = link.get('meta', {}) assert_not_in('count', link_meta) else: - link = relation['links'].values()[0] + link = 
list(relation['links'].values())[0] link_meta = link.get('meta', {}) assert_not_in('count', link_meta) @@ -302,7 +302,7 @@ def test_counts_included_in_link_fields_with_related_counts_query_param( field = field.field related_meta = getattr(field, 'related_meta', {}) if related_meta and related_meta.get('count', False): - link = relation['links'].values()[0] + link = list(relation['links'].values())[0] assert_in('count', link['meta'], field) def test_related_counts_excluded_query_param_false(self): @@ -318,7 +318,7 @@ def test_related_counts_excluded_query_param_false(self): link_meta = link.get('meta', {}) assert_not_in('count', link_meta) else: - link = relation['links'].values()[0] + link = list(relation['links'].values())[0] link_meta = link.get('meta', {}) assert_not_in('count', link_meta) @@ -371,7 +371,7 @@ def test_counts_included_in_children_field_with_children_related_counts_query_pa else: assert_not_in('count', link.get('meta', {})) elif relation != {'data': None}: - link = relation['links'].values()[0] + link = list(relation['links'].values())[0] related_meta = getattr(field, 'related_meta', {}) if related_meta and related_meta.get('count', False) and key == 'children': assert_in('count', link['meta']) @@ -401,7 +401,7 @@ def test_counts_included_in_children_and_contributors_fields_with_field_csv_rela else: assert_not_in('count', link.get('meta', {})) elif relation != {'data': None}: - link = relation['links'].values()[0] + link = list(relation['links'].values())[0] related_meta = getattr(field, 'related_meta', {}) if related_meta and related_meta.get('count', False) and key == 'children' or key == 'contributors': assert_in('count', link['meta']) diff --git a/api_tests/base/test_utils.py b/api_tests/base/test_utils.py index a8af66f79a3..2de9eabacd9 100644 --- a/api_tests/base/test_utils.py +++ b/api_tests/base/test_utils.py @@ -101,13 +101,13 @@ def test_push_status_message_unexpected_error(self, mock_sesh): False, 'push_status_message() should have generated a RuntimeError exception.' ) - except ValidationError as e: + except ValidationError: assert_true( False, 'push_status_message() should have re-raised the RuntimeError not gotten ValidationError.' 
) except RuntimeError as e: - assert_equal(getattr(e, 'message', None), + assert_equal(str(e), exception_message, 'push_status_message() should have re-raised the ' 'original RuntimeError with the original message.') diff --git a/api_tests/base/test_versioning.py b/api_tests/base/test_versioning.py index a8d991892da..89d38a383b4 100644 --- a/api_tests/base/test_versioning.py +++ b/api_tests/base/test_versioning.py @@ -124,7 +124,7 @@ def test_browsable_api_query_version(self, app): url = '/v2/?format=api&version=2.5' res = app.get(url) assert res.status_code == 200 - assert '"version": "2.5"' in res.body + assert b'"version": "2.5"' in res.body def test_json_defaults_to_default(self, app): url = '/v2/?format=json' diff --git a/api_tests/collections/test_views.py b/api_tests/collections/test_views.py index 861cba21e81..4a7d97a54a1 100644 --- a/api_tests/collections/test_views.py +++ b/api_tests/collections/test_views.py @@ -19,7 +19,6 @@ from osf.models import Collection from osf.utils.sanitize import strip_html from osf.utils.permissions import ADMIN, WRITE, READ -from tests.utils import assert_items_equal from website.project.signals import contributor_removed from api_tests.utils import disconnected_from_listeners from website.views import find_bookmark_collection @@ -908,9 +907,9 @@ def test_collection_nodelinks_list_returns( # node_links end point does not handle registrations correctly third_embedded = res_json[2]['embeds']['target_node']['errors'][0]['detail'] fourth_embedded = res_json[3]['embeds']['target_node']['errors'][0]['detail'] - assert_items_equal( - [first_embedded, second_embedded, third_embedded, fourth_embedded], - [project_private._id, project_public._id, 'Not found.', 'Not found.']) + + expected = [project_private._id, project_public._id, 'Not found.', 'Not found.'] + assert expected == [first_embedded, second_embedded, third_embedded, fourth_embedded] # test_return_private_node_pointers_logged_in_non_contributor res = app.get( @@ -1693,12 +1692,10 @@ def test_bulk_create_error_formatting( assert res.status_code == 400 assert len(res.json['errors']) == 2 errors = res.json['errors'] - assert_items_equal( - [errors[0]['source'], errors[1]['source']], - [{'pointer': '/data/0/attributes/title'}, {'pointer': '/data/1/attributes/title'}]) - assert_items_equal( - [errors[0]['detail'], errors[1]['detail']], - ['This field may not be blank.', 'This field may not be blank.']) + assert [errors[0]['source'], errors[1]['source']] == \ + [{'pointer': '/data/0/attributes/title'}, {'pointer': '/data/1/attributes/title'}] + assert [errors[0]['detail'], errors[1]['detail']] ==\ + ['This field may not be blank.', 'This field may not be blank.'] def test_non_mutational_collection_bulk_create_tests( self, app, bookmark_user_one, url_collections, collection_one, @@ -1966,10 +1963,12 @@ def test_non_mutational_collection_bulk_update_tests( assert res.status_code == 400 assert len(res.json['errors']) == 2 errors = res.json['errors'] - assert_items_equal([errors[0]['source'], errors[1]['source']], [ - {'pointer': '/data/0/attributes/title'}, {'pointer': '/data/1/attributes/title'}]) - assert_items_equal([errors[0]['detail'], errors[1]['detail']], - ['This field may not be blank.'] * 2) + assert [errors[0]['source'], + errors[1]['source']] == [ + {'pointer': '/data/0/attributes/title'}, + {'pointer': '/data/1/attributes/title'} + ] + assert [errors[0]['detail'], errors[1]['detail']] == ['This field may not be blank.'] * 2 # test_bulk_update_id_not_supplied res = app.put_json_api( diff --git 
a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index b5914669150..547a2db7fc0 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -171,7 +171,7 @@ def test_get_file(self, app, user, file_url, file): res = app.get(file_url, auth=user.auth) file.versions.first().reload() assert res.status_code == 200 - assert res.json.keys() == ['meta', 'data'] + assert list(res.json.keys()) == ['meta', 'data'] attributes = res.json['data']['attributes'] assert attributes['path'] == file.path assert attributes['kind'] == file.kind @@ -575,7 +575,7 @@ def test_get_file_guids_misc(self, app, user, file, node): url = '/{}files/{}/'.format(API_BASE, guid._id) res = app.get(url, auth=user.auth) assert res.status_code == 200 - assert res.json.keys() == ['meta', 'data'] + assert list(res.json.keys()) == ['meta', 'data'] assert res.json['data']['attributes']['path'] == file.path # test_get_file_invalid_guid_gives_404 diff --git a/api_tests/identifiers/views/test_identifier_list.py b/api_tests/identifiers/views/test_identifier_list.py index 2f28a3694c9..fb42d9f8009 100644 --- a/api_tests/identifiers/views/test_identifier_list.py +++ b/api_tests/identifiers/views/test_identifier_list.py @@ -19,7 +19,7 @@ ) from osf.utils.permissions import READ, WRITE from osf.utils.workflows import DefaultStates -from tests.utils import assert_items_equal +from tests.utils import assert_equals from website.identifiers.clients import DataCiteClient from website import settings @@ -94,13 +94,13 @@ def test_identifier_list_returns_correct_categories_and_values( categories = [identifier.category for identifier in all_identifiers] categories_in_response = [identifier['attributes']['category'] for identifier in data_registration_identifiers] - assert_items_equal(categories_in_response, categories) + assert_equals(categories_in_response, categories) # test_identifier_list_returns_correct_values values = [identifier.value for identifier in all_identifiers] values_in_response = [identifier['attributes']['value'] for identifier in data_registration_identifiers] - assert_items_equal(values_in_response, values) + assert_equals(values_in_response, values) def test_identifier_filter_by_category( self, app, registration, identifier_registration, @@ -109,7 +109,7 @@ def test_identifier_filter_by_category( IdentifierFactory(referent=registration, category='nopeid') identifiers_for_registration = registration.identifiers assert identifiers_for_registration.count() == 2 - assert_items_equal( + assert_equals( list( identifiers_for_registration.values_list( 'category', @@ -209,14 +209,14 @@ def test_identifier_list_returns_correct_categories_and_values( categories = [identifier.category for identifier in all_identifiers] categories_in_response = [ identifier['attributes']['category'] for identifier in data_node_identifiers] - assert_items_equal(categories_in_response, categories) + assert_equals(categories_in_response, categories) # test_identifier_list_returns_correct_values values = [identifier.value for identifier in all_identifiers] values_in_response = [ identifier['attributes']['value'] for identifier in data_node_identifiers ] - assert_items_equal(values_in_response, values) + assert_equals(values_in_response, values) def test_identifier_filter_by_category( self, app, node, identifier_node, url_node_identifiers): @@ -224,7 +224,7 @@ def test_identifier_filter_by_category( identifiers_for_node = Identifier.objects.filter(object_id=node.id) assert 
identifiers_for_node.count() == 2 - assert_items_equal( + assert_equals( [identifier.category for identifier in identifiers_for_node], ['carpid', 'nopeid'] ) @@ -311,13 +311,13 @@ def test_identifier_list_returns_correct_categories_and_values( categories = all_identifiers.values_list('category', flat=True) categories_in_response = [identifier['attributes']['category'] for identifier in data_preprint_identifier] - assert_items_equal(categories_in_response, list(categories)) + assert_equals(categories_in_response, list(categories)) # test_identifier_list_returns_correct_values values = all_identifiers.values_list('value', flat=True) values_in_response = [identifier['attributes']['value'] for identifier in data_preprint_identifier] - assert_items_equal(values_in_response, list(values)) + assert_equals(values_in_response, list(values)) def test_preprint_identifier_list_permissions_unpublished( self, app, all_identifiers, user, data_preprint_identifier, preprint, url_preprint_identifier): diff --git a/api_tests/logs/views/test_log_params.py b/api_tests/logs/views/test_log_params.py index f3ed2b49323..8a540eab084 100644 --- a/api_tests/logs/views/test_log_params.py +++ b/api_tests/logs/views/test_log_params.py @@ -6,7 +6,7 @@ ProjectFactory, PrivateLinkFactory, ) -from test_log_detail import LogsTestCase +from api_tests.logs.views.test_log_detail import LogsTestCase # TODO add tests for other log params diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index e39819607e2..d7d61352c84 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -20,7 +20,7 @@ from osf.utils import permissions from rest_framework import exceptions from tests.base import capture_signals, fake -from tests.utils import assert_latest_log, assert_items_equal +from tests.utils import assert_latest_log, assert_equals from website.project.signals import contributor_added, contributor_removed from api_tests.utils import disconnected_from_listeners @@ -135,7 +135,7 @@ def test_permissions_work_with_many_users( permissions.READ: [] } for i in range(0, 25): - perm = random.choice(users.keys()) + perm = random.choice(list(users.keys())) user = AuthUserFactory() project_private.add_contributor(user, permissions=perm) @@ -1682,9 +1682,10 @@ def test_node_contributor_bulk_create_logged_in_public_project_project( {'data': [payload_one, payload_two]}, auth=user.auth, bulk=True) assert res.status_code == 201 - assert_items_equal([res.json['data'][0]['attributes']['bibliographic'], + assert_equals([res.json['data'][0]['attributes']['bibliographic'], res.json['data'][1]['attributes']['bibliographic']], [True, False]) - assert_items_equal([res.json['data'][0]['attributes']['permission'], + + assert_equals([res.json['data'][0]['attributes']['permission'], res.json['data'][1]['attributes']['permission']], [permissions.ADMIN, permissions.READ]) assert res.content_type == 'application/vnd.api+json' @@ -1697,9 +1698,10 @@ def test_node_contributor_bulk_create_logged_in_contrib_private_project( auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 201 assert len(res.json['data']) == 2 - assert_items_equal([res.json['data'][0]['attributes']['bibliographic'], + assert_equals([res.json['data'][0]['attributes']['bibliographic'], res.json['data'][1]['attributes']['bibliographic']], [True, False]) - assert_items_equal([res.json['data'][0]['attributes']['permission'], + + 
assert_equals([res.json['data'][0]['attributes']['permission'], res.json['data'][1]['attributes']['permission']], [permissions.ADMIN, permissions.READ]) assert res.content_type == 'application/vnd.api+json' @@ -1917,7 +1919,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_public) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -1936,7 +1938,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_public, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -1955,7 +1957,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_private, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -1974,7 +1976,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_private, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -1993,7 +1995,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_private, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2102,7 +2104,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_public, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2132,7 +2134,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_public, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2199,7 +2201,7 @@ def test_bulk_update_contributors_public_projects_logged_in( ) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE] @@ -2214,7 +2216,7 @@ def test_bulk_update_contributors_private_projects_logged_in_contrib( ) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE] @@ -2359,7 +2361,7 @@ def test_bulk_partial_update_errors( res = app.get(url_public) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2375,7 +2377,7 @@ def test_bulk_partial_update_errors( res = app.get(url_public, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2392,7 +2394,7 @@ def test_bulk_partial_update_errors( res = app.get(url_private, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], 
data[2]['attributes']['permission']], @@ -2409,7 +2411,7 @@ def test_bulk_partial_update_errors( res = app.get(url_private, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2427,7 +2429,7 @@ def test_bulk_partial_update_errors( res = app.get(url_private, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2524,7 +2526,7 @@ def test_bulk_partial_update_errors( res = app.get(url_public, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2550,7 +2552,7 @@ def test_bulk_partial_update_errors( res = app.get(url_public, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2564,7 +2566,7 @@ def test_bulk_partial_update_contributors_public_projects_logged_in( auth=user.auth, bulk=True) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE]) @@ -2577,7 +2579,7 @@ def test_bulk_partial_update_contributors_private_projects_logged_in_contrib( auth=user.auth, bulk=True) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE]) diff --git a/api_tests/nodes/views/test_node_detail.py b/api_tests/nodes/views/test_node_detail.py index ace1b284b2b..a6668f514e7 100644 --- a/api_tests/nodes/views/test_node_detail.py +++ b/api_tests/nodes/views/test_node_detail.py @@ -31,7 +31,7 @@ ) from rest_framework import exceptions from tests.base import fake -from tests.utils import assert_items_equal, assert_latest_log, assert_latest_log_not +from tests.utils import assert_equals, assert_latest_log, assert_latest_log_not from website.views import find_bookmark_collection @@ -163,7 +163,7 @@ def test_return_private_project_details_logged_in_write_contributor( assert res.json['data']['attributes']['description'] == project_private.description assert res.json['data']['attributes']['category'] == project_private.category assert res.json['data']['attributes']['current_user_is_contributor'] is True - assert_items_equal( + assert_equals( res.json['data']['attributes']['current_user_permissions'], permissions_write) @@ -1362,6 +1362,7 @@ def test_public_project_with_publicly_editable_wiki_turns_private( make_node_payload(project_public, {'public': False}), auth=user.auth # self.user is creator/admin ) + assert res.status_code == 200 @mock.patch('website.identifiers.tasks.update_doi_metadata_on_change.s') diff --git a/api_tests/nodes/views/test_node_draft_registration_detail.py b/api_tests/nodes/views/test_node_draft_registration_detail.py index 44dfac8afff..cf26a6ee335 100644 --- a/api_tests/nodes/views/test_node_draft_registration_detail.py +++ b/api_tests/nodes/views/test_node_draft_registration_detail.py @@ -14,7 +14,7 @@ ) from osf.utils.permissions import WRITE, READ from rest_framework import exceptions -from test_node_draft_registration_list import 
DraftRegistrationTestCase +from api_tests.nodes.views.test_node_draft_registration_list import DraftRegistrationTestCase SCHEMA_VERSION = 2 diff --git a/api_tests/nodes/views/test_node_list.py b/api_tests/nodes/views/test_node_list.py index fdd487a05c7..97adb4ad38e 100644 --- a/api_tests/nodes/views/test_node_list.py +++ b/api_tests/nodes/views/test_node_list.py @@ -24,7 +24,7 @@ ) from addons.osfstorage.settings import DEFAULT_REGION_ID from rest_framework import exceptions -from tests.utils import assert_items_equal +from tests.utils import assert_equals from website.views import find_bookmark_collection from osf.utils.workflows import DefaultStates @@ -3387,12 +3387,12 @@ def test_skip_uneditable_bulk_update( assert res.status_code == 200 edited = res.json['data'] skipped = res.json['errors'] - assert_items_equal( + assert_equals( [edited[0]['id'], edited[1]['id']], [user_one_public_project_one._id, user_one_public_project_two._id] ) - assert_items_equal( + assert_equals( [skipped[0]['_id'], skipped[1]['_id']], [user_two_public_project_one._id, user_two_public_project_two._id] @@ -3421,12 +3421,12 @@ def test_skip_uneditable_bulk_partial_update( assert res.status_code == 200 edited = res.json['data'] skipped = res.json['errors'] - assert_items_equal( + assert_equals( [edited[0]['id'], edited[1]['id']], [user_one_public_project_one._id, user_one_public_project_two._id] ) - assert_items_equal( + assert_equals( [skipped[0]['_id'], skipped[1]['_id']], [user_two_public_project_one._id, user_two_public_project_two._id] @@ -4000,13 +4000,13 @@ def test_skip_uneditable_bulk_delete( res = app.delete_json_api(url, payload, auth=user_one.auth, bulk=True) assert res.status_code == 200 skipped = res.json['errors'] - assert_items_equal( + assert_equals( [skipped[0]['id'], skipped[1]['id']], [public_project_three._id, public_project_four._id] ) res = app.get('/{}nodes/'.format(API_BASE), auth=user_one.auth) - assert_items_equal( + assert_equals( [res.json['data'][0]['id'], res.json['data'][1]['id']], [public_project_three._id, public_project_four._id] ) diff --git a/api_tests/nodes/views/test_node_sparse_fieldsets.py b/api_tests/nodes/views/test_node_sparse_fieldsets.py index b38b8df090f..65b6e2d2432 100644 --- a/api_tests/nodes/views/test_node_sparse_fieldsets.py +++ b/api_tests/nodes/views/test_node_sparse_fieldsets.py @@ -149,10 +149,10 @@ def test_node_sparse_fields_detail_non_mutating_tests( assert set(node_json.keys()) == set( ['links', 'type', 'id', 'attributes', 'relationships', 'embeds']) - assert node_json['attributes'].keys() == ['title'] + assert list(node_json['attributes'].keys()) == ['title'] assert set(node_json['embeds']['children']['data'][0].keys()) == set( ['links', 'type', 'id', 'attributes', 'relationships']) - assert node_json['embeds']['children']['data'][0]['attributes'].keys() == [ + assert list(node_json['embeds']['children']['data'][0]['attributes'].keys()) == [ 'title'] assert node_json['embeds']['children']['data'][0]['attributes']['title'] == child.title @@ -164,7 +164,7 @@ def test_node_sparse_fields_detail_non_mutating_tests( assert set(node_json.keys()) == set( ['links', 'type', 'id', 'attributes', 'embeds', 'relationships']) - assert node_json['attributes'].keys() == ['title'] + assert list(node_json['attributes'].keys()) == ['title'] assert len(node_json['embeds']['contributors']['data']) == 1 assert node_json['embeds']['contributors']['data'][0]['id'] == '{}-{}'.format( node._id, user._id) @@ -178,7 +178,7 @@ def test_node_sparse_fields_detail_non_mutating_tests( 
assert set(node_json.keys()) == set( ['links', 'type', 'id', 'attributes', 'embeds', 'relationships']) - assert len(node_json['attributes'].keys()) > 1 + assert len(list(node_json['attributes'].keys())) > 1 assert len(node_json['embeds']['contributors']['data']) == 1 assert node_json['embeds']['contributors']['data'][0]['id'] == '{}-{}'.format( node._id, user._id) @@ -193,11 +193,11 @@ def test_node_sparse_fields_detail_non_mutating_tests( assert set(node_json.keys()) == set( ['links', 'type', 'id', 'attributes', 'embeds', 'relationships']) - assert node_json['attributes'].keys() == ['title'] + assert list(node_json['attributes'].keys()) == ['title'] assert len(node_json['embeds']['contributors']['data']) == 1 assert node_json['embeds']['contributors']['data'][0]['id'] == '{}-{}'.format( node._id, user._id) - assert node_json['embeds']['contributors']['data'][0]['attributes'].keys() == [ + assert list(node_json['embeds']['contributors']['data'][0]['attributes'].keys()) == [ 'bibliographic'] def test_update_with_sparse_fields(self, app, user, node, url): @@ -275,7 +275,7 @@ def test_sparse_fields_with_anonymous_link( 'embed': 'contributors' }) # current_user_can_comment and contributors are anonymized fields, should be removed assert res.status_code == 200 - assert res.json['data']['attributes'].keys() == ['title'] + assert list(res.json['data']['attributes'].keys()) == ['title'] embeds = res.json['data'].get('embeds', None) assert embeds is None or 'contributors' not in embeds diff --git a/api_tests/nodes/views/test_node_view_only_links_detail.py b/api_tests/nodes/views/test_node_view_only_links_detail.py index 9c445a72791..aec89fa7ee2 100644 --- a/api_tests/nodes/views/test_node_view_only_links_detail.py +++ b/api_tests/nodes/views/test_node_view_only_links_detail.py @@ -1,5 +1,9 @@ +import pytz +import mock +import datetime import pytest +from django.utils import timezone from api.base.settings.defaults import API_BASE from osf_tests.factories import ( ProjectFactory, @@ -225,10 +229,13 @@ def test_cannot_update_vol( @pytest.mark.django_db class TestViewOnlyLinksDelete: def test_admin_can_delete_vol(self, app, user, url, view_only_link): - res = app.delete(url, auth=user.auth) + mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc) + with mock.patch.object(timezone, 'now', return_value=mock_now): + res = app.delete(url, auth=user.auth) view_only_link.reload() assert res.status_code == 204 assert view_only_link.is_deleted + assert view_only_link.deleted == mock_now def test_vol_delete( self, app, write_contrib, read_contrib, non_contrib, url): diff --git a/api_tests/nodes/views/test_node_wiki_list.py b/api_tests/nodes/views/test_node_wiki_list.py index 27c3074813b..6a557113cfc 100644 --- a/api_tests/nodes/views/test_node_wiki_list.py +++ b/api_tests/nodes/views/test_node_wiki_list.py @@ -168,7 +168,7 @@ def test_wikis_not_returned_for_withdrawn_registration( private_registration.is_public = True withdrawal = private_registration.retract_registration( user=user, save=True) - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] # TODO: Remove mocking when StoredFileNode is implemented with mock.patch('osf.models.AbstractNode.update_search'): withdrawal.approve_retraction(user, token) diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index 613e6aa3443..035f2bef7a8 100644 --- 
a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -21,7 +21,7 @@ from osf.utils.workflows import DefaultStates from rest_framework import exceptions from tests.base import capture_signals, fake -from tests.utils import assert_latest_log, assert_items_equal +from tests.utils import assert_latest_log, assert_equals from website.project.signals import contributor_added, contributor_removed from api_tests.utils import disconnected_from_listeners @@ -110,7 +110,7 @@ def test_permissions_work_with_many_users( permissions.READ: [] } for i in range(0, 25): - perm = random.choice(users.keys()) + perm = random.choice(list(users.keys())) user_two = AuthUserFactory() preprint_unpublished.add_contributor(user_two, permissions=perm) @@ -1709,10 +1709,12 @@ def test_preprint_contributor_bulk_create_logged_in_published_preprint( {'data': [payload_one, payload_two]}, auth=user.auth, bulk=True) assert res.status_code == 201 - assert_items_equal([res.json['data'][0]['attributes']['bibliographic'], + assert_equals([res.json['data'][0]['attributes']['bibliographic'], res.json['data'][1]['attributes']['bibliographic']], [True, False]) - assert_items_equal([res.json['data'][0]['attributes']['permission'], + + assert_equals([res.json['data'][0]['attributes']['permission'], res.json['data'][1]['attributes']['permission']], [permissions.ADMIN, permissions.READ]) + assert res.content_type == 'application/vnd.api+json' res = app.get(url_published, auth=user.auth) @@ -1724,10 +1726,12 @@ def test_preprint_contributor_bulk_create_logged_in_contrib_unpublished_preprint auth=user.auth, expect_errors=True, bulk=True) assert res.status_code == 201 assert len(res.json['data']) == 2 - assert_items_equal([res.json['data'][0]['attributes']['bibliographic'], + assert_equals([res.json['data'][0]['attributes']['bibliographic'], res.json['data'][1]['attributes']['bibliographic']], [True, False]) - assert_items_equal([res.json['data'][0]['attributes']['permission'], + + assert_equals([res.json['data'][0]['attributes']['permission'], res.json['data'][1]['attributes']['permission']], [permissions.ADMIN, permissions.READ]) + assert res.content_type == 'application/vnd.api+json' res = app.get(url_unpublished, auth=user.auth) @@ -1943,7 +1947,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_published) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -1962,7 +1966,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_published, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -1981,7 +1985,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_unpublished, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2000,7 +2004,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_unpublished, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2019,7 +2023,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_unpublished, auth=user.auth) data = 
res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2128,7 +2132,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_published, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2158,7 +2162,7 @@ def test_bulk_update_contributors_errors( res = app.get(url_published, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2225,7 +2229,7 @@ def test_bulk_update_contributors_published_preprints_logged_in( ) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE] @@ -2240,7 +2244,7 @@ def test_bulk_update_contributors_unpublished_preprints_logged_in_contrib( ) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE] @@ -2384,7 +2388,7 @@ def test_bulk_partial_update_errors( res = app.get(url_published) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2400,7 +2404,7 @@ def test_bulk_partial_update_errors( res = app.get(url_published, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2417,7 +2421,7 @@ def test_bulk_partial_update_errors( res = app.get(url_unpublished, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2434,7 +2438,7 @@ def test_bulk_partial_update_errors( res = app.get(url_unpublished, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2452,7 +2456,7 @@ def test_bulk_partial_update_errors( res = app.get(url_unpublished, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2549,7 +2553,7 @@ def test_bulk_partial_update_errors( res = app.get(url_published, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2575,7 +2579,7 @@ def test_bulk_partial_update_errors( res = app.get(url_published, auth=user.auth) data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission'], data[2]['attributes']['permission']], @@ -2589,7 +2593,7 @@ def test_bulk_partial_update_contributors_published_preprints_logged_in( auth=user.auth, bulk=True) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], 
data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE]) @@ -2602,7 +2606,7 @@ def test_bulk_partial_update_contributors_unpublished_preprints_logged_in_contri auth=user.auth, bulk=True) assert res.status_code == 200 data = res.json['data'] - assert_items_equal( + assert_equals( [data[0]['attributes']['permission'], data[1]['attributes']['permission']], [permissions.ADMIN, permissions.WRITE]) diff --git a/api_tests/registrations/views/test_registration_list.py b/api_tests/registrations/views/test_registration_list.py index 3ef5259b1f6..fd15ffd706e 100644 --- a/api_tests/registrations/views/test_registration_list.py +++ b/api_tests/registrations/views/test_registration_list.py @@ -70,11 +70,8 @@ def test_return_registrations_logged_in_contributor(self): assert_equal(res.content_type, 'application/vnd.api+json') - assert_items_equal( - [registered_from_one, registered_from_two], - ['/{}nodes/{}/'.format(API_BASE, self.public_project._id), + assert [registered_from_one, registered_from_two] == ['/{}nodes/{}/'.format(API_BASE, self.public_project._id), '/{}nodes/{}/'.format(API_BASE, self.project._id)] - ) def test_return_registrations_logged_in_non_contributor(self): res = self.app.get(self.url, auth=self.user_two.auth) diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py index 5f06bd45d6d..fa9757cdd70 100644 --- a/api_tests/users/views/test_user_detail.py +++ b/api_tests/users/views/test_user_detail.py @@ -1302,7 +1302,6 @@ def test_user_put_profile_date_validate_int(self, app, user_one, user_one_url, r request_payload['data']['attributes'][request_key][0]['startYear'] = 'string' res = app.put_json_api(user_one_url, request_payload, auth=user_one.auth, expect_errors=True) assert res.status_code == 400 - print res.json['errors'][0] assert res.json['errors'][0]['detail'] == "For 'startYear' the field value u'string' is not of type u'integer'" def test_user_put_profile_date_validate_positive(self, app, user_one, user_one_url, request_payload, request_key): diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index eab7fffca6c..3057881ed16 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -300,7 +300,7 @@ def test_filter_by_attributes(self, mock_throttle, app, url, user_one): user_one.save() # test filter by confirmed - confirmed_tokens = [key for key, value in user_one.email_verifications.iteritems() if value['confirmed']] + confirmed_tokens = [key for key, value in user_one.email_verifications.items() if value['confirmed']] confirmed_count = user_one.emails.count() + len(confirmed_tokens) filtered_url = '{}?filter[confirmed]=True'.format(url) res = app.get(filtered_url, auth=user_one.auth) @@ -552,7 +552,7 @@ def test_delete_confirmed_but_unverified_email(self, app, user_one, unconfirmed_ assert res.status_code == 204 user_one.reload() - confirmed_tokens = [key for key, value in user_one.email_verifications.iteritems() if value['confirmed']] + confirmed_tokens = [key for key, value in user_one.email_verifications.items() if value['confirmed']] assert unconfirmed_token not in confirmed_tokens @pytest.mark.enable_quickfiles_creation diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index d0433037a41..cecbc9d0053 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -105,7 +105,7 @@ def 
test_update_two_factor_enabled(self, app, user_one, url, payload): assert res.json['data']['attributes']['two_factor_enabled'] is True user_one.reload() addon = user_one.get_addon('twofactor') - assert addon.deleted is False + assert addon.is_deleted is False assert addon.is_confirmed is False assert res.json['data']['attributes']['secret'] == addon.totp_secret_b32 assert res.json['data']['attributes']['two_factor_confirmed'] is False @@ -164,7 +164,7 @@ def test_update_two_factor_verification(self, mock_verify_code, app, user_one, u assert res.status_code == 200 user_one.reload() addon = user_one.get_addon('twofactor') - assert addon.deleted is False + assert addon.is_deleted is False assert addon.is_confirmed is True diff --git a/api_tests/wikis/views/test_wiki_content.py b/api_tests/wikis/views/test_wiki_content.py index e2b86bd9c69..e4f3adcc826 100644 --- a/api_tests/wikis/views/test_wiki_content.py +++ b/api_tests/wikis/views/test_wiki_content.py @@ -76,7 +76,7 @@ def test_user_cannot_get_withdrawn_registration_wiki_content(self): self._set_up_public_registration_with_wiki_page() withdrawal = self.public_registration.retract_registration( user=self.user, save=True) - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] withdrawal.approve_retraction(self.user, token) withdrawal.save() res = self.app.get( diff --git a/api_tests/wikis/views/test_wiki_detail.py b/api_tests/wikis/views/test_wiki_detail.py index 2728261c1d9..b39912d4b65 100644 --- a/api_tests/wikis/views/test_wiki_detail.py +++ b/api_tests/wikis/views/test_wiki_detail.py @@ -29,6 +29,7 @@ def make_rename_payload(wiki_page): new_page_name = 'barbaz' + payload = { 'data': { 'id': wiki_page._id, @@ -276,7 +277,7 @@ def test_user_cannot_view_withdrawn_registration_wikis(self): with mock.patch('osf.models.AbstractNode.update_search'): withdrawal = self.public_registration.retract_registration( user=self.user, save=True) - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] withdrawal.approve_retraction(self.user, token) withdrawal.save() res = self.app.get( diff --git a/api_tests/wikis/views/test_wiki_version_content.py b/api_tests/wikis/views/test_wiki_version_content.py index a5b442f1899..ecf4a0f89d6 100644 --- a/api_tests/wikis/views/test_wiki_version_content.py +++ b/api_tests/wikis/views/test_wiki_version_content.py @@ -83,7 +83,7 @@ def test_older_versions_content_can_be_accessed(self): def test_user_cannot_get_withdrawn_registration_wiki_content(self): self._set_up_public_registration_with_wiki_page() withdrawal = self.public_registration.retract_registration(user=self.user, save=True) - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] withdrawal.approve_retraction(self.user, token) withdrawal.save() res = self.app.get(self.public_registration_url, auth=self.user.auth, expect_errors=True) diff --git a/api_tests/wikis/views/test_wiki_version_detail.py b/api_tests/wikis/views/test_wiki_version_detail.py index 0cf0c91575a..8345a8fadb9 100644 --- a/api_tests/wikis/views/test_wiki_version_detail.py +++ b/api_tests/wikis/views/test_wiki_version_detail.py @@ -124,7 +124,7 @@ def test_user_cannot_view_withdrawn_registration_wiki_versions(self): # TODO: Remove mocking when StoredFileNode is implemented with mock.patch('osf.models.AbstractNode.update_search'): withdrawal = 
self.public_registration.retract_registration(user=self.user, save=True) - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] withdrawal.approve_retraction(self.user, token) withdrawal.save() res = self.app.get(self.public_registration_url, auth=self.user.auth, expect_errors=True) diff --git a/api_tests/wikis/views/test_wiki_versions_list.py b/api_tests/wikis/views/test_wiki_versions_list.py index 22999011f5c..c23869805ca 100644 --- a/api_tests/wikis/views/test_wiki_versions_list.py +++ b/api_tests/wikis/views/test_wiki_versions_list.py @@ -141,7 +141,7 @@ def test_return_wiki_versions(self, app, user, non_contrib, private_registration def test_wiki_versions_not_returned_for_withdrawn_registration(self, app, user, private_registration, private_registration_url): private_registration.is_public = True withdrawal = private_registration.retract_registration(user=user, save=True) - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] # TODO: Remove mocking when StoredFileNode is implemented with mock.patch('osf.models.AbstractNode.update_search'): withdrawal.approve_retraction(user, token) diff --git a/framework/auth/views.py b/framework/auth/views.py index 79763936ce7..81802e27f21 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -528,8 +528,8 @@ def external_login_confirm_email_get(auth, uid, token): raise HTTPError(http_status.HTTP_400_BAD_REQUEST) verification = user.email_verifications[token] email = verification['email'] - provider = verification['external_identity'].keys()[0] - provider_id = verification['external_identity'][provider].keys()[0] + provider = list(verification['external_identity'].keys())[0] + provider_id = list(verification['external_identity'][provider].keys())[0] # wrong provider if provider not in user.external_identity: raise HTTPError(http_status.HTTP_400_BAD_REQUEST) @@ -631,7 +631,7 @@ def confirm_email_get(token, auth=None, **kwargs): except exceptions.EmailConfirmTokenError as e: raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={ 'message_short': e.message_short, - 'message_long': e.message_long + 'message_long': str(e) }) if is_initial_confirmation: @@ -709,7 +709,7 @@ def unconfirmed_email_add(auth=None): except exceptions.EmailConfirmTokenError as e: raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={ 'message_short': e.message_short, - 'message_long': e.message_long + 'message_long': str(e) }) user.save() @@ -849,7 +849,7 @@ def register_user(**kwargs): ) ) ) - except BlacklistedEmailError as e: + except BlacklistedEmailError: raise HTTPError( http_status.HTTP_400_BAD_REQUEST, data=dict(message_long=language.BLACKLISTED_EMAIL) @@ -857,7 +857,7 @@ def register_user(**kwargs): except ValidationError as e: raise HTTPError( http_status.HTTP_400_BAD_REQUEST, - data=dict(message_long=e.message) + data=dict(message_long=str(e)) ) if settings.CONFIRM_REGISTRATIONS_BY_EMAIL: diff --git a/framework/database/__init__.py b/framework/database/__init__.py index 22eb98220b8..d5eac968b07 100644 --- a/framework/database/__init__.py +++ b/framework/database/__init__.py @@ -17,11 +17,11 @@ def get_or_http_error(Model, pk_or_query, allow_deleted=False, display_name=None :param type Model: StoredObject subclass to query :param pk_or_query: :type pk_or_query: either - - a representation of the record's primary key, e.g. 'abcdef' + - a representation of the record's primary key, e.g. 
'abcdef' - a subclass query to uniquely select a record, e.g. Q('title', 'eq', 'Entitled') & Q('version', 'eq', 1) :param bool allow_deleted: allow deleted records? - :param basestring display_name: + :param str display_name: :raises: HTTPError(404) if the record does not exist :raises: HTTPError(400) if no unique record is found :raises: HTTPError(410) if the resource is deleted and allow_deleted = False @@ -65,9 +65,9 @@ def autoload(Model, extract_key, inject_key, func): an appropriate HTTPError (see #get_or_http_error) :param type Model: database collection model to query (should be a subclass of StoredObject) - :param basestring extract_key: named URL field containing the desired primary key to be fetched + :param str extract_key: named URL field containing the desired primary key to be fetched from the database - :param basestring inject_key: name the instance will be accessible as when it's injected as an + :param str inject_key: name the instance will be accessible as when it's injected as an argument to the function Example usage: :: diff --git a/framework/postcommit_tasks/handlers.py index 5401db14c08..a6934c0ca5f 100644 --- a/framework/postcommit_tasks/handlers.py +++ b/framework/postcommit_tasks/handlers.py @@ -79,7 +79,7 @@ def enqueue_postcommit_task(fn, args, kwargs, celery=False, once_per_request=Tru # make a hash of the pertinent data raw = [fn.__name__, fn.__module__, args, kwargs] m = hashlib.md5() - m.update('-'.join([x.__repr__() for x in raw])) + m.update('-'.join([x.__repr__() for x in raw]).encode()) key = m.hexdigest() if not once_per_request: diff --git a/framework/routing/__init__.py index b908dd20273..1dc3620071a 100644 --- a/framework/routing/__init__.py +++ b/framework/routing/__init__.py @@ -604,6 +604,6 @@ def render(self, data, redirect_url, *args, **kwargs): # Load extra data extra_data = self.data if isinstance(self.data, dict) else self.data() - data.update({key: val for key, val in extra_data.iteritems() if key not in data}) + data.update({key: val for key, val in extra_data.items() if key not in data}) return self._render(data, template_name) diff --git a/framework/status/__init__.py index 4cb994c0fa8..6c616985fe5 100644 --- a/framework/status/__init__.py +++ b/framework/status/__init__.py @@ -32,7 +32,7 @@ def push_status_message(message, kind='warning', dismissible=True, trust=True, j try: statuses = session.data.get('status') except RuntimeError as e: - exception_message = getattr(e, 'message', None) + exception_message = str(e) if 'Working outside of request context.' in exception_message: # Working outside of request context, so should be a DRF issue. Status messages are not appropriate there. # If it's any kind of notification, then it doesn't make sense to send back to the API routes.
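The `str(e)` substitutions that recur through these hunks (`framework/auth`, `api/preprints`, `api/search`, and the `framework/status` change just above) all address the same break: Python 3 removed the `message` attribute from `BaseException`, so `e.message` raises `AttributeError`, and the defensive `getattr(e, 'message', None)` variant quietly returns `None`, which then fails on the `in` check that follows. A minimal sketch of the portable pattern (the exception class is hypothetical, for illustration only):

```python
class WorkflowError(Exception):
    pass

def describe(exc):
    # str(exc) renders the exception's args on both Python 2 and 3.
    # exc.message exists only on Python 2; getattr(exc, 'message', None)
    # hides the problem on Python 3 by returning None.
    return 'operation failed: {}'.format(str(exc))

try:
    raise WorkflowError('working outside of request context')
except WorkflowError as e:
    print(describe(e))  # operation failed: working outside of request context
```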
diff --git a/osf/external/chronos.py b/osf/external/chronos.py index 139d0e435c9..caf48790d23 100644 --- a/osf/external/chronos.py +++ b/osf/external/chronos.py @@ -83,13 +83,20 @@ def serialize_manuscript(cls, journal_id, preprint, status=ChronosSubmissionStat @classmethod def serialize_user(cls, user): + if not bool(user.given_name) and not bool(user.family_name): + raise ValueError( + 'Cannot submit because user {} requires a given and family name'.format(user.given_name if user.given_name and user.family_name else user.fullname) + ) + return { 'CHRONOS_USER_ID': user.chronos_user_id, 'EMAIL': user.username, - 'GIVEN_NAME': user.given_name if str(user.given_name) and str(user.family_name) else user.fullname, + 'GIVEN_NAME': user.given_name, + 'FULLNAME': user.fullname, + 'MIDDLE_NAME': user.middle_names, 'ORCID_ID': user.social.get('orcid', None), 'PARTNER_USER_ID': user._id, - 'SURNAME': user.family_name if str(user.given_name) and str(user.family_name) else None, + 'SURNAME': user.family_name, } @classmethod diff --git a/osf/management/commands/addon_deleted_date.py b/osf/management/commands/addon_deleted_date.py new file mode 100644 index 00000000000..26e89346f1c --- /dev/null +++ b/osf/management/commands/addon_deleted_date.py @@ -0,0 +1,96 @@ +import datetime +import logging + +from django.core.management.base import BaseCommand +from django.db import connection, transaction +from framework.celery_tasks import app as celery_app + +logger = logging.getLogger(__name__) + +TABLES_TO_POPULATE_WITH_MODIFIED = [ + 'addons_zotero_usersettings', + 'addons_dropbox_usersettings', + 'addons_dropbox_nodesettings', + 'addons_figshare_nodesettings', + 'addons_figshare_usersettings', + 'addons_forward_nodesettings', + 'addons_github_nodesettings', + 'addons_github_usersettings', + 'addons_gitlab_nodesettings', + 'addons_gitlab_usersettings', + 'addons_googledrive_nodesettings', + 'addons_googledrive_usersettings', + 'addons_mendeley_nodesettings', + 'addons_mendeley_usersettings', + 'addons_onedrive_nodesettings', + 'addons_onedrive_usersettings', + 'addons_osfstorage_nodesettings', + 'addons_osfstorage_usersettings', + 'addons_bitbucket_nodesettings', + 'addons_bitbucket_usersettings', + 'addons_owncloud_nodesettings', + 'addons_box_nodesettings', + 'addons_owncloud_usersettings', + 'addons_box_usersettings', + 'addons_dataverse_nodesettings', + 'addons_dataverse_usersettings', + 'addons_s3_nodesettings', + 'addons_s3_usersettings', + 'addons_twofactor_usersettings', + 'addons_wiki_nodesettings', + 'addons_zotero_nodesettings' +] + +UPDATE_DELETED_WITH_MODIFIED = """UPDATE {} SET deleted=modified + WHERE id IN (SELECT id FROM {} WHERE is_deleted AND deleted IS NULL LIMIT {}) RETURNING id;""" + +@celery_app.task(name='management.commands.addon_deleted_date') +def populate_deleted(dry_run=False, page_size=1000): + with transaction.atomic(): + for table in TABLES_TO_POPULATE_WITH_MODIFIED: + run_statements(UPDATE_DELETED_WITH_MODIFIED, page_size, table) + if dry_run: + raise RuntimeError('Dry Run -- Transaction rolled back') + +def run_statements(statement, page_size, table): + logger.info('Populating deleted column in table {}'.format(table)) + with connection.cursor() as cursor: + cursor.execute(statement.format(table, table, page_size)) + rows = cursor.fetchall() + if rows: + logger.info('Table {} still has rows to populate'.format(table)) + +class Command(BaseCommand): + help = '''Populates new deleted field for various models. 
Ensure you have run migrations + before running this script.''' + + def add_arguments(self, parser): + parser.add_argument( + '--dry_run', + type=bool, + default=False, + help='Run queries but do not write files', + ) + parser.add_argument( + '--page_size', + type=int, + default=1000, + help='How many rows to process at a time', + ) + + def handle(self, *args, **options): + script_start_time = datetime.datetime.now() + logger.info('Script started time: {}'.format(script_start_time)) + logger.debug(options) + + dry_run = options['dry_run'] + page_size = options['page_size'] + + if dry_run: + logger.info('DRY RUN') + + populate_deleted(dry_run, page_size) + + script_finish_time = datetime.datetime.now() + logger.info('Script finished time: {}'.format(script_finish_time)) + logger.info('Run time {}'.format(script_finish_time - script_start_time)) diff --git a/osf/management/commands/export_user_account.py b/osf/management/commands/export_user_account.py index 8ed6352d2fc..84a7318e0a3 100644 --- a/osf/management/commands/export_user_account.py +++ b/osf/management/commands/export_user_account.py @@ -225,7 +225,7 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f """ user = OSFUser.objects.get(guids___id=user_id, guids___id__isnull=False) - proceed = raw_input('\nUser has {:.2f} GB of data in OSFStorage that will be exported.\nWould you like to continue? [y/n] '.format(get_usage(user))) + proceed = input('\nUser has {:.2f} GB of data in OSFStorage that will be exported.\nWould you like to continue? [y/n] '.format(get_usage(user))) if not proceed or proceed.lower() != 'y': print('Exiting...') exit(1) diff --git a/osf/management/commands/migrate_deleted_date.py b/osf/management/commands/migrate_deleted_date.py new file mode 100644 index 00000000000..3573c713192 --- /dev/null +++ b/osf/management/commands/migrate_deleted_date.py @@ -0,0 +1,112 @@ +import datetime +import logging + +from django.core.management.base import BaseCommand +from django.db import connection, transaction +from framework.celery_tasks import app as celery_app +from framework import sentry + +logger = logging.getLogger(__name__) + +LIMIT_CLAUSE = ' LIMIT %s) RETURNING id;' +NO_LIMIT_CLAUSE = ');' + +TABLES_TO_POPULATE_WITH_MODIFIED = [ + 'osf_comment', + 'osf_institution', + 'osf_privatelink' +] + +POPULATE_BASE_FILE_NODE = """UPDATE osf_basefilenode SET deleted = deleted_on + WHERE id IN (SELECT id FROM osf_basefilenode WHERE deleted_on IS NOT NULL AND deleted IS NULL{}""" +CHECK_BASE_FILE_NODE = """SELECT deleted, deleted_on FROM osf_basefilenode WHERE deleted_on IS NOT NULL AND deleted IS NULL""" + +POPULATE_ABSTRACT_NODE = """UPDATE osf_abstractnode SET deleted = CASE WHEN deleted_date IS NOT NULL THEN deleted_date ELSE last_logged END + WHERE id IN (SELECT id FROM osf_abstractnode WHERE is_deleted AND deleted IS NULL{}""" +CHECK_ABSTRACT_NODE = """SELECT deleted, deleted_date FROM osf_abstractnode WHERE is_deleted AND deleted IS NULL""" + +UPDATE_DELETED_WITH_MODIFIED = """UPDATE {} SET deleted=modified +WHERE id IN (SELECT id FROM {} WHERE is_deleted AND deleted IS NULL{}""" + +CHECK_POPULATED = """SELECT deleted, is_deleted FROM {} WHERE deleted IS NULL AND is_deleted ;""" + +FORWARD_BASE_FILE = POPULATE_BASE_FILE_NODE.format(NO_LIMIT_CLAUSE) +FORWARD_ABSTRACT_NODE = POPULATE_ABSTRACT_NODE.format(NO_LIMIT_CLAUSE) + +REVERSE_BASE_FILE = 'UPDATE osf_basefilenode SET deleted = null' +REVERSE_ABSTRACT_NODE = 'UPDATE osf_abstractnode SET deleted = null' + +FORWARD_COMMENT = 
UPDATE_DELETED_WITH_MODIFIED.format('osf_comment', 'osf_comment', NO_LIMIT_CLAUSE) +FORWARD_INSTITUTION = UPDATE_DELETED_WITH_MODIFIED.format('osf_institution', 'osf_institution', NO_LIMIT_CLAUSE) +FORWARD_PRIVATE_LINK = UPDATE_DELETED_WITH_MODIFIED.format('osf_privatelink', 'osf_privatelink', NO_LIMIT_CLAUSE) + +REVERSE_COMMENT = 'UPDATE osf_comment SET deleted = null' +REVERSE_INSTITUTION = 'UPDATE osf_institution SET deleted = null' +REVERSE_PRIVATE_LINK = 'UPDATE osf_privatelink SET deleted = null' + +@celery_app.task(name='management.commands.migrate_deleted_date') +def populate_deleted(dry_run=False, page_size=1000): + with transaction.atomic(): + for table in TABLES_TO_POPULATE_WITH_MODIFIED: + run_statements(UPDATE_DELETED_WITH_MODIFIED, page_size, table) + run_sql(POPULATE_BASE_FILE_NODE, CHECK_BASE_FILE_NODE, page_size) + run_sql(POPULATE_ABSTRACT_NODE, CHECK_ABSTRACT_NODE, page_size) + if dry_run: + raise RuntimeError('Dry Run -- Transaction rolled back') + +def run_statements(statement, page_size, table): + logger.info('Populating deleted column in table {}'.format(table)) + with connection.cursor() as cursor: + cursor.execute(statement.format(table, table, LIMIT_CLAUSE), [page_size]) + rows = cursor.fetchall() + if rows: + cursor.execute(CHECK_POPULATED.format(table), [page_size]) + remaining_rows = cursor.fetchall() + if not remaining_rows: + sentry.log_message('Deleted field in {} table is populated'.format(table)) + +def run_sql(statement, check_statement, page_size): + table = statement.split(' ')[1] + logger.info('Populating deleted column in table {}'.format(table)) + with connection.cursor() as cursor: + cursor.execute(statement.format(LIMIT_CLAUSE), [page_size]) + rows = cursor.fetchall() + if not rows: + with connection.cursor() as cursor: + cursor.execute(check_statement, [page_size]) + sentry.log_message('Deleted field in {} table is populated'.format(table)) + +class Command(BaseCommand): + help = '''Populates new deleted field for various models. 
Ensure you have run migrations + before running this script.''' + + def add_arguments(self, parser): + parser.add_argument( + '--dry_run', + type=bool, + default=False, + help='Run queries but do not write files', + ) + parser.add_argument( + '--page_size', + type=int, + default=10000, + help='How many rows to process at a time', + ) + + def handle(self, *args, **options): + script_start_time = datetime.datetime.now() + logger.info('Script started time: {}'.format(script_start_time)) + logger.debug(options) + + dry_run = options['dry_run'] + page_size = options['page_size'] + + if dry_run: + logger.info('DRY RUN') + + populate_deleted(dry_run, page_size) + + script_finish_time = datetime.datetime.now() + logger.info('Script finished time: {}'.format(script_finish_time)) + logger.info('Run time {}'.format(script_finish_time - script_start_time)) diff --git a/osf/management/commands/restore_deleted_root_folders.py b/osf/management/commands/restore_deleted_root_folders.py new file mode 100644 index 00000000000..4dfabaae56e --- /dev/null +++ b/osf/management/commands/restore_deleted_root_folders.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +import logging +import datetime + +from bulk_update.helper import bulk_update +from django.core.management.base import BaseCommand + +from osf.models import BaseFileNode + +logger = logging.getLogger(__name__) + +def restore_deleted_root_folders(dry_run=False): + deleted_roots = BaseFileNode.objects.filter( + type='osf.trashedfolder', + is_root=True, + name='', + provider='osfstorage' + ) + + logger.info('Restoring {} deleted osfstorage root folders'.format(len(deleted_roots))) + + for i, folder in enumerate(deleted_roots, 1): + folder.deleted_on = None + folder.type = 'osf.osfstoragefolder' + + if not dry_run: + bulk_update(deleted_roots, update_fields=['deleted_on', 'type']) + + +class Command(BaseCommand): + """Restore deleted osfstorage root folders + """ + def add_arguments(self, parser): + parser.add_argument( + '--dry_run', + type=bool, + default=False, + help='Run queries but do not write files', + ) + + def handle(self, *args, **options): + script_start_time = datetime.datetime.now() + logger.info('Script started time: {}'.format(script_start_time)) + + dry_run = options['dry_run'] + + if dry_run: + logger.info('DRY RUN. 
Data will not be saved.') + + restore_deleted_root_folders(dry_run) + + script_finish_time = datetime.datetime.now() + logger.info('Script finished time: {}'.format(script_finish_time)) + logger.info('Run time {}'.format(script_finish_time - script_start_time)) diff --git a/osf/metadata/utils.py b/osf/metadata/utils.py index 9caacbde23f..3b9a6cb5bbd 100644 --- a/osf/metadata/utils.py +++ b/osf/metadata/utils.py @@ -40,10 +40,10 @@ def datacite_format_contributors(contributors): ] if contributor.external_identity.get('ORCID'): - verified = contributor.external_identity['ORCID'].values()[0] == 'VERIFIED' + verified = list(contributor.external_identity['ORCID'].values())[0] == 'VERIFIED' if verified: name_identifiers.append({ - 'nameIdentifier': contributor.external_identity['ORCID'].keys()[0], + 'nameIdentifier': list(contributor.external_identity['ORCID'].keys())[0], 'nameIdentifierScheme': 'ORCID', 'schemeURI': 'http://orcid.org/' }) diff --git a/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py b/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py index 39169c239bb..1ca7a925cf1 100644 --- a/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py +++ b/osf/migrations/0032_unquote_gd_nodesettings_folder_path.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- # Generated by Django 1.11.1 on 2017-05-30 19:21 from __future__ import unicode_literals -from urllib2 import quote, unquote +from future.moves.urllib.parse import quote, unquote + from django_bulk_update.helper import bulk_update from django.db import migrations diff --git a/osf/migrations/0171_add_registration_files_count.py b/osf/migrations/0171_add_registration_files_count.py index 3fad583927f..5e2ef3e5c17 100644 --- a/osf/migrations/0171_add_registration_files_count.py +++ b/osf/migrations/0171_add_registration_files_count.py @@ -2,12 +2,9 @@ # Generated by Django 1.11.15 on 2019-01-18 14:43 from __future__ import unicode_literals import logging -from tqdm import tqdm from django.db import migrations, models -from django.db.models import Count from django_bulk_update.helper import bulk_update -from osf.models import Registration logger = logging.getLogger(__file__) @@ -19,23 +16,25 @@ def add_registration_files_count(state, *args, **kwargs): relationship for speed purposes in this migration. If this model changes significantly, this migration may have to be modified in the future so it runs on an empty db. 
""" - registrations = Registration.objects.filter(is_deleted=False).filter( - files__type='osf.osfstoragefile', - files__deleted_on__isnull=True - ).annotate( - annotated_file_count=Count('files') - ) - progress_bar = tqdm(total=registrations.count()) + Registration = state.get_model('osf', 'registration') + registrations = Registration.objects.filter(is_deleted=False, files_count__isnull=True) + BaseFileNode = state.get_model('osf', 'BaseFileNode') + ContentType = state.get_model('contenttypes', 'ContentType') + content_type = ContentType.objects.get(app_label='osf', model='abstractnode') registrations_to_update = [] - for i, registration in enumerate(registrations, 1): - progress_bar.update(i) - registration.files_count = registration.annotated_file_count + for registration in registrations: + registration_files = BaseFileNode.objects.filter( + target_object_id=registration.id, + target_content_type=content_type, + type='osf.osfstoragefile', + deleted_on__isnull=True, + ) + registration.files_count = registration_files.count() registrations_to_update.append(registration) bulk_update(registrations_to_update, update_fields=['files_count'], batch_size=5000) logger.info('Populated `files_count` on a total of {} registrations'.format(len(registrations_to_update))) - progress_bar.close() def noop(*args, **kwargs): diff --git a/osf/migrations/0188_deleted_field_mig.py b/osf/migrations/0188_deleted_field_mig.py new file mode 100644 index 00000000000..142bdc7b1db --- /dev/null +++ b/osf/migrations/0188_deleted_field_mig.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2019-06-27 20:29 +from __future__ import unicode_literals + +from django.db import migrations +import osf.utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0187_remove_outdated_contributor_permissions'), + ] + + operations = [ + migrations.AddField( + model_name='abstractnode', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='basefilenode', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='comment', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='privatelink', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='institution', + name='deleted', + field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), + ), + ] diff --git a/osf/migrations/0189_deleted_field_data.py b/osf/migrations/0189_deleted_field_data.py new file mode 100644 index 00000000000..ec000aa196f --- /dev/null +++ b/osf/migrations/0189_deleted_field_data.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2018-11-12 17:18 +from __future__ import unicode_literals + +import logging + +from django.db import migrations + +from osf.management.commands.migrate_deleted_date import ( + FORWARD_BASE_FILE, + FORWARD_ABSTRACT_NODE, + FORWARD_COMMENT, + FORWARD_INSTITUTION, + FORWARD_PRIVATE_LINK, + REVERSE_BASE_FILE, + REVERSE_ABSTRACT_NODE, + REVERSE_COMMENT, + REVERSE_INSTITUTION, + REVERSE_PRIVATE_LINK, +) +from website.settings import DEBUG_MODE + +logger = logging.getLogger(__name__) + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0188_deleted_field_mig'), + ] + + if DEBUG_MODE: + operations = [ + 
migrations.RunSQL(FORWARD_BASE_FILE, REVERSE_BASE_FILE), + migrations.RunSQL(FORWARD_INSTITUTION, REVERSE_INSTITUTION), + migrations.RunSQL(FORWARD_ABSTRACT_NODE, REVERSE_ABSTRACT_NODE), + migrations.RunSQL(FORWARD_PRIVATE_LINK, REVERSE_PRIVATE_LINK), + migrations.RunSQL(FORWARD_COMMENT, REVERSE_COMMENT) + ] + else: + operations = [] + logger.info( + 'The automatic migration only runs in DEBUG_MODE. Use management command migrate_deleted_date instead' + ) diff --git a/osf/models/base.py b/osf/models/base.py index 19ff0c1cfc5..7f20f84e1ad 100644 --- a/osf/models/base.py +++ b/osf/models/base.py @@ -56,8 +56,7 @@ def explain(self, *args): cursor.execute('explain analyze verbose %s' % query, params) return '\n'.join(r[0] for r in cursor.fetchall()) - -QuerySet.__bases__ += (QuerySetExplainMixin,) +QuerySet = type('QuerySet', (QuerySetExplainMixin, QuerySet), dict(QuerySet.__dict__)) class BaseModel(TimeStampedModel, QuerySetExplainMixin): diff --git a/osf/models/comment.py b/osf/models/comment.py index 5e25df99178..10a380b6ef1 100644 --- a/osf/models/comment.py +++ b/osf/models/comment.py @@ -9,6 +9,7 @@ from osf.models.mixins import CommentableMixin from osf.models.spam import SpamMixin from osf.models import validators +from osf.utils.fields import NonNaiveDateTimeField from framework.exceptions import PermissionsError from website import settings @@ -39,6 +40,7 @@ class Comment(GuidMixin, SpamMixin, CommentableMixin, BaseModel): edited = models.BooleanField(default=False) is_deleted = models.BooleanField(default=False) + deleted = NonNaiveDateTimeField(blank=True, null=True) # The type of root_target: node/files page = models.CharField(max_length=255, blank=True) content = models.TextField( @@ -215,8 +217,10 @@ def delete(self, auth, save=False): 'comment': self._id, } self.is_deleted = True + current_time = timezone.now() + self.deleted = current_time log_dict.update(self.root_target.referent.get_extra_log_params(self)) - self.modified = timezone.now() + self.modified = current_time if save: self.save() self.node.add_log( @@ -231,6 +235,7 @@ def undelete(self, auth, save=False): if not self.node.can_comment(auth) or self.user._id != auth.user._id: raise PermissionsError('{0!r} does not have permission to comment on this node'.format(auth.user)) self.is_deleted = False + self.deleted = None log_dict = { 'project': self.node.parent_id, 'node': self.node._id, diff --git a/osf/models/conference.py b/osf/models/conference.py index 2c7faac0f31..ae06450c7ab 100644 --- a/osf/models/conference.py +++ b/osf/models/conference.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -import urlparse +from future.moves.urllib.parse import urljoin from django.db import models from osf.models.base import BaseModel, ObjectIDMixin @@ -77,7 +77,7 @@ def get_by_endpoint(cls, endpoint, active): @property def absolute_url(self): - return urlparse.urljoin(settings.DOMAIN, '/view/{}'.format(self.endpoint)) + return urljoin(settings.DOMAIN, '/view/{}'.format(self.endpoint)) @property def valid_submissions(self): diff --git a/osf/models/external.py b/osf/models/external.py index 3c6b03a14b3..3f3dbae3768 100644 --- a/osf/models/external.py +++ b/osf/models/external.py @@ -97,7 +97,7 @@ def __init__(cls, name, bases, dct): PROVIDER_LOOKUP[cls.short_name] = cls -class ExternalProvider(with_metaclass(ExternalProviderMeta)): +class ExternalProvider(object, with_metaclass(ExternalProviderMeta)): """A connection to an external service (ex: GitHub). This object contains no credentials, and is not saved in the database. 
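
Note on the soft-delete convention threaded through the model and migration changes above (and continued below for BaseFileNode): each model keeps its boolean is_deleted flag but gains a deleted timestamp, and the two are always written together. A minimal sketch of the pattern, assuming a Django model that has both fields (simplified; the real delete methods also write logs and fire signals):

from django.utils import timezone

def soft_delete(obj, save=True):
    # Flag and timestamp move together, mirroring Comment.delete above.
    obj.is_deleted = True
    obj.deleted = timezone.now()
    if save:
        obj.save()

def soft_undelete(obj, save=True):
    # Restoring clears both, so backfill queries of the form
    # 'WHERE is_deleted AND deleted IS NULL' find nothing left to populate.
    obj.is_deleted = False
    obj.deleted = None
    if save:
        obj.save()
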
diff --git a/osf/models/files.py b/osf/models/files.py index 51e41b6a219..a1c9efca18f 100644 --- a/osf/models/files.py +++ b/osf/models/files.py @@ -106,6 +106,7 @@ class BaseFileNode(TypedModel, CommentableMixin, OptionalGuidMixin, Taggable, Ob is_deleted = False deleted_on = NonNaiveDateTimeField(blank=True, null=True) + deleted = NonNaiveDateTimeField(blank=True, null=True) deleted_by = models.ForeignKey('osf.OSFUser', related_name='files_deleted_by', null=True, blank=True, on_delete=models.CASCADE) objects = BaseFileNodeManager() @@ -418,14 +419,20 @@ def delete(self, user=None, parent=None, save=True, deleted_on=None): :param deleted_on: :return: """ - self.deleted_by = user - self.deleted_on = deleted_on = deleted_on or timezone.now() + deleted = deleted_on + if not self.is_root: + self.deleted_by = user + self.deleted = deleted_on or timezone.now() + deleted = self.deleted + # This will need to be removed + self.deleted_on = deleted if not self.is_file: - self.recast(TrashedFolder._typedmodels_type) + if not self.is_root: + self.recast(TrashedFolder._typedmodels_type) for child in BaseFileNode.objects.filter(parent=self.id).exclude(type__in=TrashedFileNode._typedmodels_subtypes): - child.delete(user=user, save=save, deleted_on=deleted_on) + child.delete(user=user, save=save, deleted_on=deleted) else: self.recast(TrashedFile._typedmodels_type) diff --git a/osf/models/institution.py b/osf/models/institution.py index 98338486a3a..2957144df97 100644 --- a/osf/models/institution.py +++ b/osf/models/institution.py @@ -7,6 +7,7 @@ from django.contrib.postgres import fields from django.core.urlresolvers import reverse from django.db import models +from osf.utils.fields import NonNaiveDateTimeField from osf.models import base from osf.models.contributor import InstitutionalContributor from osf.models.mixins import Loggable @@ -54,6 +55,7 @@ class Institution(DirtyFieldsMixin, Loggable, base.ObjectIDMixin, base.BaseModel ) is_deleted = models.BooleanField(default=False, db_index=True) + deleted = NonNaiveDateTimeField(null=True, blank=True) class Meta: # custom permissions for use in the OSF Admin App diff --git a/osf/models/metaschema.py b/osf/models/metaschema.py index d03a7a39578..eeed290de9c 100644 --- a/osf/models/metaschema.py +++ b/osf/models/metaschema.py @@ -123,9 +123,9 @@ def validate_metadata(self, metadata, reviewer=False, required_fields=False): raise ValidationError( 'For your registration your response to the \'{}\' field is invalid.'.format(question['title']), ) - raise ValidationError(e.message) + raise ValidationError(e) except jsonschema.SchemaError as e: - raise ValidationValueError(e.message) + raise ValidationValueError(e) return diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 62cc18ffbd0..6236811f74c 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -226,8 +226,8 @@ def get_oauth_addons(self): if hasattr(addon, 'oauth_provider') ] - def has_addon(self, addon_name, deleted=False): - return bool(self.get_addon(addon_name, deleted=deleted)) + def has_addon(self, addon_name, is_deleted=False): + return bool(self.get_addon(addon_name, is_deleted=is_deleted)) def get_addon_names(self): return [each.short_name for each in self.get_addons()] @@ -238,7 +238,7 @@ def get_or_add_addon(self, name, *args, **kwargs): return addon return self.add_addon(name, *args, **kwargs) - def get_addon(self, name, deleted=False): + def get_addon(self, name, is_deleted=False): try: settings_model = self._settings_model(name) except LookupError: @@ -247,7 +247,7 @@ def 
get_addon(self, name, deleted=False): return None try: settings_obj = settings_model.objects.get(owner=self) - if not settings_obj.deleted or deleted: + if not settings_obj.is_deleted or is_deleted: return settings_obj except ObjectDoesNotExist: pass @@ -269,7 +269,7 @@ def add_addon(self, addon_name, auth=None, override=False, _force=False): return False # Reactivate deleted add-on if present - addon = self.get_addon(addon_name, deleted=True) + addon = self.get_addon(addon_name, is_deleted=True) if addon: if addon.deleted: addon.undelete(save=True) diff --git a/osf/models/node.py b/osf/models/node.py index f5cb9735edd..558345ce056 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -320,7 +320,7 @@ class AbstractNode(DirtyFieldsMixin, TypedModel, AddonModelMixin, IdentifierMixi affiliated_institutions = models.ManyToManyField('Institution', related_name='nodes') category = models.CharField(max_length=255, - choices=CATEGORY_MAP.items(), + choices=list(CATEGORY_MAP.items()), blank=True, default='') # Dictionary field mapping user id to a list of nodes in node.nodes which the user has subscriptions for @@ -337,6 +337,7 @@ class AbstractNode(DirtyFieldsMixin, TypedModel, AddonModelMixin, IdentifierMixi on_delete=models.SET_NULL, null=True, blank=True) deleted_date = NonNaiveDateTimeField(null=True, blank=True) + deleted = NonNaiveDateTimeField(null=True, blank=True) description = models.TextField(blank=True, default='') file_guid_to_share_uuids = DateTimeAwareJSONField(default=dict, blank=True) forked_date = NonNaiveDateTimeField(db_index=True, null=True, blank=True) @@ -2277,11 +2278,12 @@ def remove_node(self, auth, date=None): for node in hierarchy: # Add log to parents node.is_deleted = True - node.deleted_date = date + node.deleted_date = log_date + node.deleted = log_date node.add_remove_node_log(auth=auth, date=log_date) project_signals.node_deleted.send(node) - bulk_update(hierarchy, update_fields=['is_deleted', 'deleted_date']) + bulk_update(hierarchy, update_fields=['is_deleted', 'deleted_date', 'deleted']) if len(hierarchy.filter(is_public=True)): AbstractNode.bulk_update_search(hierarchy.filter(is_public=True)) @@ -2472,6 +2474,7 @@ def is_bookmark_collection(self): """For v1 compat""" return False + def remove_addons(auth, resource_object_list): for config in AbstractNode.ADDONS_AVAILABLE: try: @@ -2480,7 +2483,7 @@ def remove_addons(auth, resource_object_list): settings_model = None if settings_model: - addon_list = settings_model.objects.filter(owner__in=resource_object_list, deleted=False) + addon_list = settings_model.objects.filter(owner__in=resource_object_list, is_deleted=False) for addon in addon_list: addon.after_delete(auth.user) diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py index ce9580ee0d0..6d5664e220c 100644 --- a/osf/models/nodelog.py +++ b/osf/models/nodelog.py @@ -167,7 +167,7 @@ class NodeLog(ObjectIDMixin, BaseModel): original_node = models.ForeignKey('AbstractNode', db_index=True, null=True, blank=True, on_delete=models.CASCADE) - def __unicode__(self): + def __repr__(self): return ('({self.action!r}, user={self.user!r},, node={self.node!r}, params={self.params!r}) ' 'with id {self.id!r}').format(self=self) diff --git a/osf/models/oauth.py b/osf/models/oauth.py index 33b74d7b733..c2bc8fcdba2 100644 --- a/osf/models/oauth.py +++ b/osf/models/oauth.py @@ -9,7 +9,7 @@ from framework.auth import cas from website import settings -from urlparse import urljoin +from future.moves.urllib.parse import urljoin def generate_client_secret(): diff 
--git a/osf/models/private_link.py b/osf/models/private_link.py index ad9d286fced..9f752eab7af 100644 --- a/osf/models/private_link.py +++ b/osf/models/private_link.py @@ -6,12 +6,13 @@ from osf.models.base import BaseModel, ObjectIDMixin from osf.utils.sanitize import unescape_entities - +from osf.utils.fields import NonNaiveDateTimeField class PrivateLink(ObjectIDMixin, BaseModel): key = models.CharField(max_length=512, null=False, unique=True, blank=False) name = models.CharField(max_length=255, blank=True, null=True) is_deleted = models.BooleanField(default=False) + deleted = NonNaiveDateTimeField(blank=True, null=True) anonymous = models.BooleanField(default=False) nodes = models.ManyToManyField('AbstractNode', related_name='private_links') diff --git a/osf/models/registrations.py b/osf/models/registrations.py index bb104dbe35b..92f119fda88 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -373,6 +373,7 @@ def copy_unclaimed_records(self): def delete_registration_tree(self, save=False): logger.debug('Marking registration {} as deleted'.format(self._id)) self.is_deleted = True + self.deleted = timezone.now() for draft_registration in DraftRegistration.objects.filter(registered_node=self): # Allow draft registration to be submitted if draft_registration.approval: diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index b640912134b..0efa323492b 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -522,6 +522,7 @@ def _on_reject(self, user): # Delete parent registration if it was created at the time the embargo was initiated if not self.for_existing_registration: parent_registration.is_deleted = True + parent_registration.deleted = timezone.now() parent_registration.save() def disapprove_embargo(self, user, token): diff --git a/osf/utils/functional.py b/osf/utils/functional.py index 000dbe16eca..ca25dd5dbc8 100644 --- a/osf/utils/functional.py +++ b/osf/utils/functional.py @@ -1,3 +1,4 @@ +from past.builtins import basestring import collections # Function courtesy of @brianjgeiger and @abought diff --git a/osf/utils/sanitize.py b/osf/utils/sanitize.py index a118a650d19..56598ed1c12 100644 --- a/osf/utils/sanitize.py +++ b/osf/utils/sanitize.py @@ -1,3 +1,4 @@ +from past.builtins import basestring import json import collections diff --git a/osf/utils/tokens/__init__.py b/osf/utils/tokens/__init__.py index 3b413869a23..14876b05fd9 100644 --- a/osf/utils/tokens/__init__.py +++ b/osf/utils/tokens/__init__.py @@ -36,7 +36,7 @@ def from_string(cls, encoded_token): http_status.HTTP_400_BAD_REQUEST, data={ 'message_short': 'Bad request', - 'message_long': e.message + 'message_long': str(e) } ) return cls(encoded_token=encoded_token, payload=payload) diff --git a/osf/utils/tokens/handlers.py b/osf/utils/tokens/handlers.py index 2b7fdf01cbc..99e850a6302 100644 --- a/osf/utils/tokens/handlers.py +++ b/osf/utils/tokens/handlers.py @@ -90,7 +90,7 @@ def sanction_handler(kind, action, payload, encoded_token, auth, **kwargs): except TokenError as e: raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={ 'message_short': e.message_short, - 'message_long': e.message_long + 'message_long': str(e) }) except PermissionsError as e: raise HTTPError(http_status.HTTP_401_UNAUTHORIZED, data={ diff --git a/osf/utils/workflows.py b/osf/utils/workflows.py index 363ac9d0a18..e0ee5cf7d41 100644 --- a/osf/utils/workflows.py +++ b/osf/utils/workflows.py @@ -9,7 +9,7 @@ class ChoiceEnum(Enum): @classmethod def choices(cls): - return tuple((v, unicode(v).title()) 
for v in cls.values()) + return tuple((v, str(v).title()) for v in cls.values() if v is not None) @classmethod def values(cls): diff --git a/osf_tests/factories.py b/osf_tests/factories.py index b8499447e31..967612b8497 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -186,6 +186,13 @@ class BaseNodeFactory(DjangoModelFactory): class Meta: model = models.Node + #Fix for adding the deleted date. + @classmethod + def _create(cls, *args, **kwargs): + if kwargs.get('is_deleted', None): + kwargs['deleted'] = timezone.now() + return super(BaseNodeFactory, cls)._create(*args, **kwargs) + class ProjectFactory(BaseNodeFactory): category = 'project' @@ -270,6 +277,14 @@ class Meta: anonymous = False creator = factory.SubFactory(UserFactory) + @classmethod + def _create(cls, target_class, *args, **kwargs): + instance = super(PrivateLinkFactory, cls)._create(target_class, *args, **kwargs) + if instance.is_deleted and not instance.deleted: + instance.deleted = timezone.now() + instance.save() + return instance + class CollectionFactory(DjangoModelFactory): class Meta: @@ -418,7 +433,7 @@ def _create(cls, *args, **kwargs): registration.retract_registration(user) withdrawal = registration.retraction - token = withdrawal.approval_state.values()[0]['approval_token'] + token = list(withdrawal.approval_state.values())[0]['approval_token'] with patch('osf.models.AbstractNode.update_search'): withdrawal.approve_retraction(user, token) withdrawal.save() diff --git a/osf_tests/management_commands/test_migrate_deleted_date.py b/osf_tests/management_commands/test_migrate_deleted_date.py new file mode 100644 index 00000000000..dd5b7e94fb8 --- /dev/null +++ b/osf_tests/management_commands/test_migrate_deleted_date.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +import pytest + +from framework.auth import Auth +from addons.osfstorage.models import OsfStorageFolder +from osf_tests.factories import ( + ProjectFactory, + RegionFactory, + UserFactory, + CommentFactory, +) +from tests.base import DbTestCase +from osf.management.commands import migrate_deleted_date + +class TestMigrateDeletedDate(DbTestCase): + + def setUp(self): + super(TestMigrateDeletedDate, self).setUp() + self.region_us = RegionFactory(_id='US', name='United States') + + @pytest.fixture() + def project(self, user, is_public=True, is_deleted=False, region=None, parent=None): + if region is None: + region = self.region_us + project = ProjectFactory(creator=user, is_public=is_public, is_deleted=is_deleted) + addon = project.get_addon('osfstorage') + addon.region = region + addon.save() + + return project + + @pytest.fixture() + def user(self): + return UserFactory() + + def test_populate_with_modified(self): + statement = migrate_deleted_date.UPDATE_DELETED_WITH_MODIFIED + table = 'osf_comment' + user = UserFactory() + project = ProjectFactory(creator=user) + comment = CommentFactory(user=user, node=project) + + comment.delete(Auth(user), save=True) + comment.reload() + assert(comment.deleted) + + comment.deleted = None + comment.save() + migrate_deleted_date.run_statements(statement, 1000, table) + comment.reload() + assert(comment.deleted) + assert(comment.deleted == comment.modified) + + def test_populate_columns(self): + statement = migrate_deleted_date.POPULATE_BASE_FILE_NODE + check_statement = migrate_deleted_date.CHECK_BASE_FILE_NODE + user = UserFactory() + project = self.project(user) + osf_folder = OsfStorageFolder.objects.filter(target_object_id=project.id)[0] + + project.remove_node(Auth(user)) + osf_folder.is_root = False + 
osf_folder.delete() + osf_folder.reload() + project.reload() + assert(osf_folder.deleted) + assert(project.deleted) + + project.deleted = None + osf_folder.deleted = None + + osf_folder.save() + project.save() + migrate_deleted_date.run_sql(statement, check_statement, 1000) + + osf_folder.reload() + assert(osf_folder.deleted) diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index a33e4d645c2..0ec92aaad25 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -261,7 +261,7 @@ def from_property(id, prop): 'type': 'object', 'properties': [ from_property(pid, sp) - for pid, sp in prop['value'].items() + for pid, sp in list(prop['value'].items()) ] } else: @@ -282,7 +282,7 @@ def from_question(qid, question): 'type': 'object', 'properties': [ from_property(id, value) - for id, value in question.get('value').items() + for id, value in list(question.get('value').items()) ] } else: @@ -770,7 +770,7 @@ def test_archive_failure_different_name_same_sha(self): def test_archive_success_same_file_in_component(self): file_tree = file_tree_factory(3, 3, 3) - selected = select_files_from_tree(file_tree).values()[0] + selected = list(select_files_from_tree(file_tree).values())[0] child_file_tree = file_tree_factory(0, 0, 0) child_file_tree['children'] = [selected] @@ -1191,7 +1191,11 @@ def test_find_failed_registrations(self): pending.append(reg) failed = Registration.find_failed_registrations() assert_equal(len(failed), 5) - assert_items_equal([f._id for f in failed], failures) + failures = list(failures) + failures.sort() + failed = list([f._id for f in failed]) + failed.sort() + assert_equals(failed, failures) for pk in legacy: assert_false(pk in failed) @@ -1209,7 +1213,7 @@ def error(*args, **kwargs): func(node=self.dst) mock_fail.assert_called_with( self.dst, - errors=[e.message] + errors=[str(e)] ) class TestArchiverBehavior(OsfTestCase): diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index 2ff96936842..2f2be4215bd 100644 --- a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -1,4 +1,8 @@ +import mock +import pytz import pytest +import datetime +from django.utils import timezone from collections import OrderedDict from addons.box.models import BoxFile @@ -367,9 +371,11 @@ def test_create_sends_mention_added_signal_if_group_member_mentions(self, node, def test_delete(self, node): comment = CommentFactory(node=node) auth = Auth(comment.user) - - comment.delete(auth=auth, save=True) + mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc) + with mock.patch.object(timezone, 'now', return_value=mock_now): + comment.delete(auth=auth, save=True) assert comment.is_deleted, True + assert comment.deleted == mock_now assert comment.node.logs.count() == 2 assert comment.node.logs.latest().action == NodeLog.COMMENT_REMOVED @@ -379,6 +385,7 @@ def test_undelete(self): comment.delete(auth=auth, save=True) comment.undelete(auth=auth, save=True) assert not comment.is_deleted + assert not comment.deleted assert comment.node.logs.count() == 3 assert comment.node.logs.latest().action == NodeLog.COMMENT_RESTORED diff --git a/osf_tests/test_elastic_search.py b/osf_tests/test_elastic_search.py index 5ec4e1df4ae..72b056f2721 100644 --- a/osf_tests/test_elastic_search.py +++ b/osf_tests/test_elastic_search.py @@ -1324,28 +1324,28 @@ def setUp(self): def test_first_migration_no_remove(self): migrate(delete=False, remove=False, index=settings.ELASTIC_INDEX, app=self.app.app) var = self.es.indices.get_aliases() - 
assert_equal(var[settings.ELASTIC_INDEX + '_v1']['aliases'].keys()[0], settings.ELASTIC_INDEX) + assert_equal(list(var[settings.ELASTIC_INDEX + '_v1']['aliases'].keys())[0], settings.ELASTIC_INDEX) def test_multiple_migrations_no_remove(self): for n in range(1, 21): migrate(delete=False, remove=False, index=settings.ELASTIC_INDEX, app=self.app.app) var = self.es.indices.get_aliases() - assert_equal(var[settings.ELASTIC_INDEX + '_v{}'.format(n)]['aliases'].keys()[0], settings.ELASTIC_INDEX) + assert_equal(list(var[settings.ELASTIC_INDEX + '_v{}'.format(n)]['aliases'].keys())[0], settings.ELASTIC_INDEX) def test_first_migration_with_remove(self): migrate(delete=False, remove=True, index=settings.ELASTIC_INDEX, app=self.app.app) var = self.es.indices.get_aliases() - assert_equal(var[settings.ELASTIC_INDEX + '_v1']['aliases'].keys()[0], settings.ELASTIC_INDEX) + assert_equal(list(var[settings.ELASTIC_INDEX + '_v1']['aliases'].keys())[0], settings.ELASTIC_INDEX) def test_multiple_migrations_with_remove(self): for n in range(1, 21, 2): migrate(delete=False, remove=True, index=settings.ELASTIC_INDEX, app=self.app.app) var = self.es.indices.get_aliases() - assert_equal(var[settings.ELASTIC_INDEX + '_v{}'.format(n)]['aliases'].keys()[0], settings.ELASTIC_INDEX) + assert_equal(list(var[settings.ELASTIC_INDEX + '_v{}'.format(n)]['aliases'].keys())[0], settings.ELASTIC_INDEX) migrate(delete=False, remove=True, index=settings.ELASTIC_INDEX, app=self.app.app) var = self.es.indices.get_aliases() - assert_equal(var[settings.ELASTIC_INDEX + '_v{}'.format(n + 1)]['aliases'].keys()[0], settings.ELASTIC_INDEX) + assert_equal(list(var[settings.ELASTIC_INDEX + '_v{}'.format(n + 1)]['aliases'].keys())[0], settings.ELASTIC_INDEX) assert not var.get(settings.ELASTIC_INDEX + '_v{}'.format(n)) def test_migration_institutions(self): diff --git a/osf_tests/test_file_metadata.py b/osf_tests/test_file_metadata.py index 3fe5a5445c4..ec386096ebd 100644 --- a/osf_tests/test_file_metadata.py +++ b/osf_tests/test_file_metadata.py @@ -150,7 +150,7 @@ def test_update_record(self, node, record, initial_metadata): assert node.logs.latest().action == NodeLog.FILE_METADATA_UPDATED # Make sure old fields are cleared - assert initial_metadata.keys() not in record.metadata.keys() + assert list(initial_metadata.keys()) not in list(record.metadata.keys()) full_metadata = { 'funders': [ diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index 8b7bea86afa..7498bf56734 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -686,6 +686,11 @@ def test_node_factory(self): for addon in node.addons if addon.config.short_name == addon_config.short_name ]) + mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc) + with mock.patch.object(timezone, 'now', return_value=mock_now): + deleted_node = NodeFactory(is_deleted=True) + assert deleted_node.is_deleted + assert deleted_node.deleted == mock_now def test_project_factory(self): node = ProjectFactory() @@ -698,6 +703,7 @@ def test_project_factory(self): assert node.is_public is False assert node.is_deleted is False assert hasattr(node, 'deleted_date') + assert hasattr(node, 'deleted') assert node.is_registration is False assert hasattr(node, 'registered_date') assert node.is_fork is False @@ -932,7 +938,9 @@ def test_add_contributor_unreg_user_without_unclaimed_records(self, user, node): with pytest.raises(UserStateError) as excinfo: node.add_contributor(unregistered_user, auth=Auth(user)) - assert 'This contributor cannot be added' in excinfo.value.message + 
assert str(excinfo.value) == 'This contributor cannot be added. ' \ + 'If the problem persists please report it to please report it to' \ + ' support@osf.io.' def test_cant_add_creator_as_contributor_twice(self, node, user): node.add_contributor(contributor=user) @@ -1048,7 +1056,7 @@ def test_set_visible_is_noop_if_visibility_is_unchanged(self, node, auth): def test_set_visible_contributor_with_only_one_contributor(self, node, user): with pytest.raises(ValueError) as excinfo: node.set_visible(user=user, visible=False, auth=None) - assert excinfo.value.message == 'Must have at least one visible contributor' + assert str(excinfo.value) == 'Must have at least one visible contributor' def test_set_visible_missing(self, node): with pytest.raises(ValueError): @@ -1271,12 +1279,12 @@ def test_add_contributor_registered_or_not_unreg_user_without_unclaimed_records( def test_add_contributor_user_id_already_contributor(self, user, node): with pytest.raises(ValidationError) as excinfo: node.add_contributor_registered_or_not(auth=Auth(user), user_id=user._id, save=True) - assert 'is already a contributor' in excinfo.value.message + assert 'is already a contributor' in str(excinfo.value) def test_add_contributor_invalid_user_id(self, user, node): with pytest.raises(ValueError) as excinfo: node.add_contributor_registered_or_not(auth=Auth(user), user_id='abcde', save=True) - assert 'was not found' in excinfo.value.message + assert 'was not found' in str(excinfo.value) def test_add_contributor_fullname_email(self, user, node): contributor_obj = node.add_contributor_registered_or_not(auth=Auth(user), full_name='Jane Doe', email='jane@doe.com') @@ -1888,7 +1896,7 @@ def test_cannot_register_deleted_node(self, node, auth): auth=auth, data=None ) - assert err.value.message == 'Cannot register deleted node.' + assert str(err.value) == 'Cannot register deleted node.' @mock.patch('website.project.signals.after_create_registration') def test_register_node_copies_subjects(self, mock_signal, subject): @@ -3921,6 +3929,7 @@ def test_delete_project_log_present(self, project, parent_project, auth): assert parent_project.is_deleted # parent node should have a log of the event assert parent_project.logs.latest().action == 'project_deleted' + assert parent_project.deleted == parent_project.logs.latest().date def test_remove_project_with_project_child_deletes_all_in_hierarchy(self, parent_project, project, auth): parent_project.remove_node(auth=auth) @@ -4366,7 +4375,7 @@ def node(self, user, parent): @pytest.fixture(autouse=True) def mock_addons(self, node): - def mock_get_addon(addon_name, deleted=False): + def mock_get_addon(addon_name, is_deleted=False): # Overrides AddonModelMixin.get_addon -- without backrefs, # no longer guaranteed to return the same set of objects-in-memory return self.patched_addons.get(addon_name, None) diff --git a/osf_tests/test_quickfiles.py b/osf_tests/test_quickfiles.py index 2fdfe880f41..3d155b24158 100644 --- a/osf_tests/test_quickfiles.py +++ b/osf_tests/test_quickfiles.py @@ -6,7 +6,7 @@ from addons.osfstorage.models import OsfStorageFile from osf.exceptions import MaxRetriesError, NodeStateError from api_tests.utils import create_test_file -from tests.utils import assert_items_equal +from tests.utils import assert_equals from tests.base import get_default_metaschema from . 
import factories @@ -134,7 +134,7 @@ def test_quickfiles_moves_files_on_triple_merge_with_name_conflict(self, user, q actual_filenames = list(OsfStorageFile.objects.all().values_list('name', flat=True)) expected_filenames = ['Woo.pdf', 'Woo (1).pdf', 'Woo (2).pdf'] - assert_items_equal(actual_filenames, expected_filenames) + assert_equals(actual_filenames, expected_filenames) def test_quickfiles_moves_files_on_triple_merge_with_name_conflict_with_digit(self, user, quickfiles): name = 'Woo (1).pdf' @@ -153,7 +153,7 @@ def test_quickfiles_moves_files_on_triple_merge_with_name_conflict_with_digit(se actual_filenames = list(OsfStorageFile.objects.all().values_list('name', flat=True)) expected_filenames = ['Woo (1).pdf', 'Woo (2).pdf', 'Woo (3).pdf'] - assert_items_equal(actual_filenames, expected_filenames) + assert_equals(actual_filenames, expected_filenames) def test_quickfiles_moves_destination_quickfiles_has_weird_numbers(self, user, quickfiles): other_user = factories.UserFactory() @@ -174,7 +174,7 @@ def test_quickfiles_moves_destination_quickfiles_has_weird_numbers(self, user, q actual_filenames = list(quickfiles.files.all().values_list('name', flat=True)) expected_filenames = ['Woo.pdf', 'Woo (1).pdf', 'Woo (2).pdf', 'Woo (3).pdf'] - assert_items_equal(actual_filenames, expected_filenames) + assert_equals(actual_filenames, expected_filenames) @mock.patch('osf.models.user.MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS', 1) def test_quickfiles_moves_errors_after_max_renames(self, user, quickfiles): diff --git a/osf_tests/utils.py b/osf_tests/utils.py index 7d7bd8d5e4b..7edfa224901 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -139,11 +139,10 @@ def mock_archive(project, schema=None, auth=None, data=None, parent=None, root_job.done = True root_job.save() sanction = registration.sanction - with contextlib.nested( - mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)), - mock.patch('website.archiver.tasks.archive_success.delay', mock.Mock()) - ): - archiver_listeners.archive_callback(registration) + with mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)), \ + mock.patch('website.archiver.tasks.archive_success.delay', mock.Mock()): + archiver_listeners.archive_callback(registration) + if autoapprove: sanction = registration.sanction sanction.state = Sanction.APPROVED diff --git a/package.json b/package.json index 864d0504cd6..8a756331088 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "OSF", - "version": "19.29.0", + "version": "19.30.0", "description": "Facilitating Open Science", "repository": "https://github.com/CenterForOpenScience/osf.io", "author": "Center for Open Science", diff --git a/scripts/analytics/addon_snapshot.py b/scripts/analytics/addon_snapshot.py index d768175bb01..9c8010458e7 100644 --- a/scripts/analytics/addon_snapshot.py +++ b/scripts/analytics/addon_snapshot.py @@ -91,7 +91,7 @@ def get_events(self, date=None): connected_count += 1 deleted_count = addon.models['nodesettings'].objects.filter(deleted=True).count() if addon.models.get('nodesettings') else 0 if has_external_account: - disconnected_count = addon.models['nodesettings'].objects.filter(external_account__isnull=True, deleted=False).count() if addon.models.get('nodesettings') else 0 + disconnected_count = addon.models['nodesettings'].objects.filter(external_account__isnull=True, is_deleted=False).count() if addon.models.get('nodesettings') else 0 else: if addon.models.get('nodesettings'): for nsm in
addon.models['nodesettings'].objects.filter(deleted=False): diff --git a/scripts/tests/test_migrate_analytics.py b/scripts/tests/test_migrate_analytics.py index 9d38a2d4d65..15aef992b64 100644 --- a/scripts/tests/test_migrate_analytics.py +++ b/scripts/tests/test_migrate_analytics.py @@ -63,12 +63,12 @@ def test_generate_events_between_events(self): assert_equal(len(generated_events), 3) returned_dates = [event['keen']['timestamp'] for event in generated_events] expected_dates = ['2016-03-{}T00:00:00+00:00'.format(i) for i in range(13, 16)] - assert_items_equal(returned_dates, expected_dates) + assert_equals(returned_dates, expected_dates) # check the totals are the same as the first event returned_totals = [event['nodes']['total'] for event in generated_events] expected_totals = [self.keen_event['nodes']['total'] for i in range(len(generated_events))] - assert_items_equal(returned_totals, expected_totals) + assert_equals(returned_totals, expected_totals) def test_fill_in_event_gaps(self): filled_in_events = fill_in_event_gaps('test', [self.keen_event, self.keen_event_2, self.keen_event_3]) @@ -79,4 +79,4 @@ def test_fill_in_event_gaps(self): returned_dates = [event['keen']['timestamp'] for event in filled_in_events] expected_dates = ['2016-03-{}T00:00:00+00:00'.format(i) for i in range(13, 16)] expected_dates += ['2016-03-{}T00:00:00+00:00'.format(i) for i in range(17, 19)] - assert_items_equal(returned_dates, expected_dates) + assert_equals(returned_dates, expected_dates) diff --git a/scripts/tests/test_node_log_events.py b/scripts/tests/test_node_log_events.py index b99fe793912..6917c02b234 100644 --- a/scripts/tests/test_node_log_events.py +++ b/scripts/tests/test_node_log_events.py @@ -66,5 +66,5 @@ def test_results_structure(self): } ] - assert_items_equal(expected, self.results) + assert_equals(expected, self.results) diff --git a/scripts/tests/test_populate_popular_projects_and_registrations.py b/scripts/tests/test_populate_popular_projects_and_registrations.py index 5b07240ddb6..71795051000 100644 --- a/scripts/tests/test_populate_popular_projects_and_registrations.py +++ b/scripts/tests/test_populate_popular_projects_and_registrations.py @@ -93,5 +93,5 @@ def test_populate_popular_nodes_and_registrations(self, mock_client): assert_equal(len(self.popular_links_node.nodes), 2) assert_equal(len(self.popular_links_registrations.nodes), 2) - assert_items_equal(popular_nodes, self.popular_links_node.nodes) - assert_items_equal(popular_registrations, self.popular_links_registrations.nodes) + assert_equals(popular_nodes, self.popular_links_node.nodes) + assert_equals(popular_registrations, self.popular_links_registrations.nodes) diff --git a/tasks/__init__.py b/tasks/__init__.py index 1f771aee720..117f07cf943 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -20,7 +20,7 @@ try: from tasks import local # noqa -except ImportError as error: +except ImportError: print('No tasks/local.py file found. 
' 'Did you remember to copy local-dist.py to local.py?') @@ -814,7 +814,7 @@ def webpack(ctx, clean=False, watch=False, dev=False, colors=False): def build_js_config_files(ctx): from website import settings print('Building JS config files...') - with open(os.path.join(settings.STATIC_FOLDER, 'built', 'nodeCategories.json'), 'wb') as fp: + with open(os.path.join(settings.STATIC_FOLDER, 'built', 'nodeCategories.json'), 'w') as fp: json.dump(settings.NODE_CATEGORY_MAP, fp) print('...Done.') diff --git a/tests/base.py b/tests/base.py index 672cc5f2113..6297cf9e87a 100644 --- a/tests/base.py +++ b/tests/base.py @@ -114,6 +114,7 @@ class AppTestCase(unittest.TestCase): def setUp(self): super(AppTestCase, self).setUp() self.app = TestApp(test_app) + self.app.lint = False # This breaks things in Py3 if not self.PUSH_CONTEXT: return self.context = test_app.test_request_context(headers={ diff --git a/tests/json_api_test_app.py b/tests/json_api_test_app.py index cd7062d5edf..bfb1e10ed98 100644 --- a/tests/json_api_test_app.py +++ b/tests/json_api_test_app.py @@ -71,7 +71,13 @@ def do_request(self, req, status, expect_errors): # https://code.djangoproject.com/ticket/11111 ? req.environ['REMOTE_ADDR'] = str(req.environ['REMOTE_ADDR']) req.environ['PATH_INFO'] = str(req.environ['PATH_INFO']) - + auth = req.environ.get('HTTP_AUTHORIZATION') + if auth is None: + req.environ['HTTP_AUTHORIZATION'] = 'None' + elif isinstance(auth, bytes): + req.environ['HTTP_AUTHORIZATION'] = str(auth.decode()) + else: + req.environ['HTTP_AUTHORIZATION'] = str(auth) # Curry a data dictionary into an instance of the template renderer # callback function. data = {} diff --git a/tests/test_addons.py b/tests/test_addons.py index df3d43c7c2f..3211b01433d 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -574,8 +574,8 @@ def test_has_permission_download_prereg_challenge_admin_not_draft(self): def test_has_permission_write_prereg_challenge_admin(self): with assert_raises(HTTPError) as exc_info: views.check_access(self.draft_registration.branched_from, - Auth(user=self.prereg_challenge_admin_user), WRITE, None) - assert_equal(exc_info.exception.code, http.FORBIDDEN) + Auth(user=self.prereg_challenge_admin_user), WRITE, None) + assert_equal(exc_info.exception.code, http_status.HTTP_403_FORBIDDEN) class TestCheckOAuth(OsfTestCase): diff --git a/tests/test_campaigns.py b/tests/test_campaigns.py index 69bf2afee3c..8d3d900d3c2 100644 --- a/tests/test_campaigns.py +++ b/tests/test_campaigns.py @@ -221,6 +221,7 @@ class TestRegistrationThroughCampaigns(OsfTestCase): def setUp(self): super(TestRegistrationThroughCampaigns, self).setUp() + campaigns.get_campaigns() # Set up global CAMPAIGNS def test_confirm_email_get_with_campaign(self): for key, value in campaigns.CAMPAIGNS.items(): diff --git a/tests/test_cas_authentication.py b/tests/test_cas_authentication.py index 3c4f56237d6..d7ad27ed741 100644 --- a/tests/test_cas_authentication.py +++ b/tests/test_cas_authentication.py @@ -179,7 +179,7 @@ def test_application_token_revocation_succeeds(self): responses.Response( responses.POST, url, - body={'client_id': client_id, + json={'client_id': client_id, 'client_secret': client_secret}, status=204 ) @@ -197,7 +197,7 @@ def test_application_token_revocation_fails(self): responses.Response( responses.POST, url, - body={'client_id': client_id, + json={'client_id': client_id, 'client_secret': client_secret}, status=400 ) diff --git a/tests/test_citeprocpy.py b/tests/test_citeprocpy.py index dfde0d77b3c..1a599505489 100644 --- 
a/tests/test_citeprocpy.py +++ b/tests/test_citeprocpy.py @@ -38,7 +38,6 @@ def test_failing_citations(self): citeprocpy = '' if citeprocpy == v: matches.append(k) - print k assert(len(matches) == 0) def test_passing_citations(self): @@ -57,7 +56,6 @@ def test_passing_citations(self): if citeprocpy != v: not_matches.append(k) citation.append(citeprocpy) - print k assert(len(not_matches) == 0) diff --git a/tests/test_conferences.py b/tests/test_conferences.py index 079659eddea..fdfc3c93ab3 100644 --- a/tests/test_conferences.py +++ b/tests/test_conferences.py @@ -5,9 +5,8 @@ import hmac import hashlib -from StringIO import StringIO +from io import BytesIO -from django.core.exceptions import ValidationError from django.db import IntegrityError import furl @@ -142,7 +141,7 @@ def setUp(self): self.conference = ConferenceFactory() self.body = 'dragon on my back' self.content = 'dragon attack' - self.attachment = StringIO(self.content) + self.attachment = BytesIO(self.content) self.recipient = '{0}{1}-poster@osf.io'.format( 'test-' if settings.DEV_MODE else '', self.conference.endpoint, @@ -412,7 +411,7 @@ def test_attachments_count_zero(self): def test_attachments_count_one(self): content = 'slightly mad' - sio = StringIO(content) + sio = BytesIO(content) ctx = self.make_context( method='POST', data={ diff --git a/tests/test_events.py b/tests/test_events.py index 560c7e8979c..68335f78ed3 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -622,9 +622,9 @@ def setUp(self): 'none': [] } self.diff_1_3 = {email_transactional: ['d1234'], 'none': ['g1234'], email_digest: ['a1234']} - self.union_1_2 = {'email_transactional': ['e1234', 'd1234', 'k1234', 'f1234'], + self.union_1_2 = {'email_transactional': ['e1234', 'f1234', 'd1234', 'k1234'], 'none': ['h1234', 'g1234', 'i1234', 'l1234'], - 'email_digest': ['j1234', 'b1234', 'a1234', 'c1234']} + 'email_digest': ['j1234', 'a1234', 'c1234', 'b1234']} self.dup_1_3 = {email_transactional: ['e1234', 'f1234'], 'none': ['h1234', 'g1234'], 'email_digest': ['a1234', 'c1234']} @@ -634,19 +634,26 @@ def test_subscription_user_difference(self): def test_subscription_user_union(self): result = utils.subscriptions_users_union(self.emails_1, self.emails_2) - assert_equal(self.union_1_2, result) + assert set(self.union_1_2['email_transactional']) == set(result['email_transactional']) + assert set(self.union_1_2['none']) == set(result['none']) + assert set(self.union_1_2['email_digest']) == set(result['email_digest']) def test_remove_duplicates(self): result = utils.subscriptions_users_remove_duplicates( self.emails_1, self.emails_4, remove_same=False ) - assert_equal(self.dup_1_3, result) + assert set(self.dup_1_3['email_transactional']) == set(result['email_transactional']) + assert set(self.dup_1_3['none']) == set(result['none']) + assert set(self.dup_1_3['email_digest']) == set(result['email_digest']) def test_remove_duplicates_true(self): result = utils.subscriptions_users_remove_duplicates( self.emails_1, self.emails_1, remove_same=True ) - assert_equal({email_digest: [], email_transactional: [], 'none': ['h1234', 'g1234', 'i1234']}, result) + + assert set(result['none']) == set(['h1234', 'g1234', 'i1234']) + assert result['email_digest'] == [] + assert result['email_transactional'] == [] wb_path = u'5581cb50a24f710b0f4623f9' diff --git a/tests/test_notifications.py b/tests/test_notifications.py index a237f23d728..ffceafc35a6 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -752,12 +752,12 @@ def 
diff --git a/tests/test_notifications.py b/tests/test_notifications.py
index a237f23d728..ffceafc35a6 100644
--- a/tests/test_notifications.py
+++ b/tests/test_notifications.py
@@ -752,12 +752,12 @@ def test_get_all_node_subscriptions_given_user_subscriptions(self):
         node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node,
                                                                                  user_subscriptions=user_subscriptions)]
         expected_node_subscription_ids = [x._id for x in self.node_subscription]
-        assert_items_equal(node_subscription_ids, expected_node_subscription_ids)
+        assert_list_equal(node_subscription_ids, expected_node_subscription_ids)
 
     def test_get_all_node_subscriptions_given_user_and_node(self):
         node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node)]
         expected_node_subscription_ids = [x._id for x in self.node_subscription]
-        assert_items_equal(node_subscription_ids, expected_node_subscription_ids)
+        assert_list_equal(node_subscription_ids, expected_node_subscription_ids)
 
     def test_get_configured_project_ids_does_not_return_user_or_node_ids(self):
         configured_nodes = utils.get_configured_projects(self.user)
@@ -996,8 +996,8 @@ def test_format_user_subscriptions(self):
             'children': []
         }, {
             'event': {
-                'title': 'global_mentions',
-                'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_mentions'],
+                'title': 'global_comments',
+                'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comments'],
                 'notificationType': 'email_transactional',
                 'parent_notification_type': None
             },
@@ -1005,8 +1005,8 @@ def test_format_user_subscriptions(self):
             'children': []
         }, {
             'event': {
-                'title': 'global_comments',
-                'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comments'],
+                'title': 'global_mentions',
+                'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_mentions'],
                 'notificationType': 'email_transactional',
                 'parent_notification_type': None
             },
@@ -1023,7 +1023,8 @@ def test_format_user_subscriptions(self):
             'children': []
         }
     ]
-        assert_items_equal(data, expected)
+
+        assert_equal(data, expected)
 
     def test_get_global_notification_type(self):
         notification_type = utils.get_global_notification_type(self.user_subscription[1] ,self.user)
diff --git a/tests/test_oauth.py b/tests/test_oauth.py
index 3784f6762ad..818d9a56b54 100644
--- a/tests/test_oauth.py
+++ b/tests/test_oauth.py
@@ -1,6 +1,6 @@
 from datetime import datetime
-from rest_framework import status as http_status
 import logging
+from rest_framework import status as http_status
 import json
 import logging
 import mock
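Note: several hunks below replace excinfo.value.message with str(excinfo.value). The .message attribute was a Python 2 convenience on BaseException that Python 3 removed; str(e) (or e.args[0]) is the portable spelling. A minimal sketch:

    try:
        raise ValueError('Must have at least one visible contributor')
    except ValueError as e:
        # Py3: BaseException has no .message attribute
        assert str(e) == 'Must have at least one visible contributor'
        assert e.args[0] == 'Must have at least one visible contributor'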
diff --git a/tests/test_preprints.py b/tests/test_preprints.py
index c998f1f93fc..2d1947262b9 100644
--- a/tests/test_preprints.py
+++ b/tests/test_preprints.py
@@ -456,7 +456,7 @@ def test_set_visible_is_noop_if_visibility_is_unchanged(self, preprint, auth):
     def test_set_visible_contributor_with_only_one_contributor(self, preprint, user):
         with pytest.raises(ValueError) as excinfo:
             preprint.set_visible(user=user, visible=False, auth=None)
-        assert excinfo.value.message == 'Must have at least one visible contributor'
+        assert str(excinfo.value) == 'Must have at least one visible contributor'
 
     def test_set_visible_missing(self, preprint):
         with pytest.raises(ValueError):
@@ -596,7 +596,7 @@ def test_add_contributor_user_id_already_contributor(self, user, preprint):
     def test_add_contributor_invalid_user_id(self, user, preprint):
         with pytest.raises(ValueError) as excinfo:
             preprint.add_contributor_registered_or_not(auth=Auth(user), user_id='abcde', save=True)
-        assert 'was not found' in excinfo.value.message
+        assert 'was not found' in str(excinfo.value)
 
     def test_add_contributor_fullname_email(self, user, preprint):
         contributor_obj = preprint.add_contributor_registered_or_not(auth=Auth(user), full_name='Jane Doe', email='jane@doe.com')
@@ -1594,7 +1594,7 @@ def test_admin_cannot_unpublish(self):
         with assert_raises(ValueError) as e:
             self.preprint.set_published(False, auth=Auth(self.user), save=True)
 
-        assert_in('Cannot unpublish', e.exception.message)
+        assert_in('Cannot unpublish', str(e.exception))
 
     def test_set_title_permissions(self):
         original_title = self.preprint.title
@@ -1945,20 +1945,20 @@ def test_format_preprint(self):
         assert set(gn['@type'] for gn in res) == {'creator', 'contributor', 'throughsubjects', 'subject', 'throughtags', 'tag', 'workidentifier', 'agentidentifier', 'person', 'preprint', 'workrelation', 'creativework'}
 
         nodes = dict(enumerate(res))
-        preprint = nodes.pop(next(k for k, v in nodes.items() if v['@type'] == 'preprint'))
+        preprint = nodes.pop(next(k for k, v in list(nodes.items()) if v['@type'] == 'preprint'))
         assert preprint['title'] == self.preprint.title
         assert preprint['description'] == self.preprint.description
         assert preprint['is_deleted'] == (not self.preprint.is_published or not self.preprint.is_public or self.preprint.is_preprint_orphan)
         assert preprint['date_updated'] == self.preprint.modified.isoformat()
         assert preprint['date_published'] == self.preprint.date_published.isoformat()
 
-        tags = [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'tag']
-        through_tags = [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'throughtags']
+        tags = [nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'tag']
+        through_tags = [nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'throughtags']
         assert sorted(tag['@id'] for tag in tags) == sorted(tt['tag']['@id'] for tt in through_tags)
         assert sorted(tag['name'] for tag in tags) == ['preprint', 'spoderman']
 
-        subjects = [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'subject']
-        through_subjects = [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'throughsubjects']
+        subjects = [nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'subject']
+        through_subjects = [nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'throughsubjects']
         s_ids = [s['@id'] for s in subjects]
         ts_ids = [ts['subject']['@id'] for ts in through_subjects]
         cs_ids = [i for i in set(s.get('central_synonym', {}).get('@id') for s in subjects) if i]
@@ -1970,7 +1970,7 @@ def test_format_preprint(self):
             assert s['uri'].endswith('v2/taxonomies/{}/'.format(subject._id))  # This cannot change
         assert set(subject['name'] for subject in subjects) == set([s.text for s in self.preprint.subjects.all()] + [s.bepress_subject.text for s in self.preprint.subjects.filter(bepress_subject__isnull=False)])
 
-        people = sorted([nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'person'], key=lambda x: x['given_name'])
+        people = sorted([nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'person'], key=lambda x: x['given_name'])
         expected_people = sorted([{
             '@type': 'person',
             'given_name': u'BoJack',
@@ -1989,7 +1989,7 @@ def test_format_preprint(self):
 
         assert people == expected_people
 
-        creators = sorted([nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'creator'], key=lambda x: x['order_cited'])
+        creators = sorted([nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'creator'], key=lambda x: x['order_cited'])
         assert creators == [{
             '@id': creators[0]['@id'],
             '@type': 'creator',
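Note: the recurring nodes.items() to list(nodes.items()) rewrite is needed because these comprehensions call nodes.pop(k) while looping. Python 3 dict views are live, so mutating the dict mid-iteration raises RuntimeError; materializing the items into a list first makes the pops safe. Illustration:

    nodes = {0: 'tag', 1: 'subject', 2: 'tag'}

    # Py3: RuntimeError: dictionary changed size during iteration
    # tags = [nodes.pop(k) for k, v in nodes.items() if v == 'tag']

    # Snapshot the items first; then popping while looping is safe.
    tags = [nodes.pop(k) for k, v in list(nodes.items()) if v == 'tag']
    assert tags == ['tag', 'tag'] and nodes == {1: 'subject'}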
@@ -2006,7 +2006,7 @@ def test_format_preprint(self):
             'creative_work': {'@id': preprint['@id'], '@type': preprint['@type']},
         }]
 
-        contributors = [nodes.pop(k) for k, v in nodes.items() if v['@type'] == 'contributor']
+        contributors = [nodes.pop(k) for k, v in list(nodes.items()) if v['@type'] == 'contributor']
         assert contributors == [{
             '@id': contributors[0]['@id'],
             '@type': 'contributor',
@@ -2015,7 +2015,7 @@ def test_format_preprint(self):
             'creative_work': {'@id': preprint['@id'], '@type': preprint['@type']},
         }]
 
-        agentidentifiers = {nodes.pop(k)['uri'] for k, v in nodes.items() if v['@type'] == 'agentidentifier'}
+        agentidentifiers = {nodes.pop(k)['uri'] for k, v in list(nodes.items()) if v['@type'] == 'agentidentifier'}
         assert agentidentifiers == set([
             'mailto:' + self.user.username,
             'mailto:' + self.preprint.creator.username,
@@ -2035,7 +2035,7 @@ def test_format_preprint(self):
-        workidentifiers = [nodes.pop(k)['uri'] for k, v in nodes.items() if v['@type'] == 'workidentifier']
+        workidentifiers = [nodes.pop(k)['uri'] for k, v in list(nodes.items()) if v['@type'] == 'workidentifier']
         assert workidentifiers == [urljoin(settings.DOMAIN, self.preprint._id + '/')]
 
-        relation = nodes.pop(nodes.keys()[0])
+        relation = nodes.pop(list(nodes.keys())[0])
         assert relation == {'@id': relation['@id'], '@type': 'workrelation', 'related': {'@id': related_work['@id'], '@type': related_work['@type']}, 'subject': {'@id': preprint['@id'], '@type': preprint['@type']}}
 
         assert nodes == {}
@@ -2480,7 +2480,7 @@ def build_payload(self, metadata, **kwargs):
         options.update(kwargs)
         options = {
             key: value
-            for key, value in options.iteritems()
+            for key, value in options.items()
             if value is not None
         }
         message, signature = signing.default_signer.sign_payload(options)
diff --git a/tests/test_rubeus.py b/tests/test_rubeus.py
index f69157818e0..742d76bbdda 100644
--- a/tests/test_rubeus.py
+++ b/tests/test_rubeus.py
@@ -1,23 +1,16 @@
 #!/usr/bin/env python
 # encoding: utf-8
 
-import os
-from types import NoneType
-from xmlrpclib import DateTime
-
 import mock
 from nose.tools import *  # noqa: F403
 
 from tests.base import OsfTestCase
 from osf_tests.factories import (UserFactory, ProjectFactory, NodeFactory,
-                                 AuthFactory, RegistrationFactory,
-                                 PrivateLinkFactory)
+                                 AuthFactory, PrivateLinkFactory)
 from framework.auth import Auth
 from website.util import rubeus
 from website.util.rubeus import sort_by_name
-from osf.utils import sanitize
-
 
 class TestRubeus(OsfTestCase):
 
     def setUp(self):
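Note: dict.iteritems(), .iterkeys() and .itervalues() are gone in Python 3; .items() now returns a lazy view, so comprehensions like the build_payload one above work unchanged once renamed. Sketch:

    options = {'uri': '/file', 'size': None, 'name': 'a.txt'}

    # Py2 offered options.iteritems(); in Py3, items() is already a lazy view.
    cleaned = {k: v for k, v in options.items() if v is not None}
    assert cleaned == {'uri': '/file', 'name': 'a.txt'}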
diff --git a/tests/test_views.py b/tests/test_views.py
index 544b95e99fd..27c11807103 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -41,7 +41,7 @@
 from addons.osfstorage import settings as osfstorage_settings
 from osf.models import AbstractNode, NodeLog, QuickFilesNode
 from website.profile.utils import add_contributor_json, serialize_unregistered
-from website.profile.views import fmt_date_or_none, update_osf_help_mails_subscription
+from website.profile.views import update_osf_help_mails_subscription
 from website.project.decorators import check_can_access
 from website.project.model import has_anonymous_link
 from website.project.signals import contributor_added
@@ -771,7 +771,7 @@ def test_suspended_project(self):
         node.suspended = True
         node.save()
         url = node.api_url
-        res = self.app.get(url, auth=Auth(self.user1), expect_errors=True)
+        res = self.app.get(url, expect_errors=True)
         assert_equal(res.status_code, 451)
 
     def test_private_link_edit_name(self):
@@ -1093,13 +1093,6 @@ def setUp(self):
         super(TestUserProfile, self).setUp()
         self.user = AuthUserFactory()
 
-    def test_fmt_date_or_none(self):
-        with assert_raises(HTTPError) as cm:
-            #enter a date before 1900
-            fmt_date_or_none(dt.datetime(1890, 10, 31, 18, 23, 29, 227))
-        # error should be raised because date is before 1900
-        assert_equal(cm.exception.code, http_status.HTTP_400_BAD_REQUEST)
-
     def test_unserialize_social(self):
         url = api_url_for('unserialize_social')
         payload = {
@@ -4156,7 +4149,7 @@ def test_user_unsubscribe_and_subscribe_help_mailing_list(self):
 
     def test_get_notifications(self):
         user = AuthUserFactory()
-        mailing_lists = dict(user.osf_mailing_lists.items() + user.mailchimp_mailing_lists.items())
+        mailing_lists = dict(list(user.osf_mailing_lists.items()) + list(user.mailchimp_mailing_lists.items()))
         url = api_url_for('user_notifications')
         res = self.app.get(url, auth=user.auth)
         assert_equal(mailing_lists, res.json['mailing_lists'])
diff --git a/tests/utils.py b/tests/utils.py
index f87382131f0..3745844b87f 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -10,7 +10,6 @@
 from framework.auth import Auth
 from framework.celery_tasks.handlers import celery_teardown_request
-from framework.postcommit_tasks.handlers import postcommit_after_request
 from osf.models import Sanction
 from tests.base import get_default_metaschema
 from website.archiver import ARCHIVER_SUCCESS
@@ -98,7 +97,7 @@ def wrapper(self, *args, **kwargs):
         return wrapper
     return outer_wrapper
 
-def assert_items_equal(item_one, item_two):
+def assert_equals(item_one, item_two):
     item_one.sort()
     item_two.sort()
     assert item_one == item_two
@@ -185,11 +184,10 @@ def mock_archive(project, schema=None, auth=None, data=None, parent=None,
         root_job.done = True
         root_job.save()
         sanction = registration.root.sanction
-        with contextlib.nested(
-            mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)),
-            mock.patch('website.archiver.tasks.archive_success.delay', mock.Mock())
-        ):
-            archiver_listeners.archive_callback(registration)
+        with mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)), \
+                mock.patch('website.archiver.tasks.archive_success.delay', mock.Mock()):
+            archiver_listeners.archive_callback(registration)
+
     if autoapprove:
         sanction = registration.root.sanction
         sanction.state = Sanction.APPROVED
diff --git a/website/app.py b/website/app.py
index b3b43e0c19e..7f3d7774bfa 100644
--- a/website/app.py
+++ b/website/app.py
@@ -6,7 +6,6 @@
 import json
 import logging
 import os
-import thread
 from collections import OrderedDict
 
 import django
@@ -95,16 +94,13 @@ def init_app(settings_module='website.settings', set_backends=True, routes=True,
     if app.config.get('IS_INITIALIZED', False) is True:
         return app
 
-    logger.info('Initializing the application from process {}, thread {}.'.format(
-        os.getpid(), thread.get_ident()
-    ))
 
     setup_django()
 
     # The settings module
     settings = importlib.import_module(settings_module)
 
     init_addons(settings, routes)
-    with open(os.path.join(settings.STATIC_FOLDER, 'built', 'nodeCategories.json'), 'wb') as fp:
+    with open(os.path.join(settings.STATIC_FOLDER, 'built', 'nodeCategories.json'), 'w') as fp:
         json.dump(settings.NODE_CATEGORY_MAP, fp)
 
     app.debug = settings.DEBUG_MODE
diff --git a/website/archiver/decorators.py b/website/archiver/decorators.py
index 117bddab2a3..0d6f46bfb37 100644
--- a/website/archiver/decorators.py
+++ b/website/archiver/decorators.py
@@ -20,6 +20,6 @@ def wrapped(*args, **kwargs):
             registration.save()
             signals.archive_fail.send(
                 registration,
-                errors=[e.message]
+                errors=[str(e)]
             )
     return wrapped
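Note on the contextlib.nested removal above: contextlib.nested is gone in Python 3, and calling mock.patch(...) as a bare expression only constructs a patcher object without ever starting it, so the mocks would silently not apply; hence the rewrite to a single multi-manager with statement. The equivalent minimal pattern:

    import mock  # or: from unittest import mock

    class Job:
        def archive_tree_finished(self):
            return False

    job = Job()

    # A bare mock.patch.object(job, ...) call only builds a patcher;
    # nothing is patched until it is started or used as a context manager.
    with mock.patch.object(job, 'archive_tree_finished', mock.Mock(return_value=True)):
        assert job.archive_tree_finished() is True   # patched inside the block
    assert job.archive_tree_finished() is False      # restored afterwards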
diff --git a/website/filters/__init__.py b/website/filters/__init__.py
index d209502972b..7deb5b30a72 100644
--- a/website/filters/__init__.py
+++ b/website/filters/__init__.py
@@ -11,7 +11,7 @@ def gravatar(user, use_ssl=False, d=None, r=None, size=None):
     # user can be a User instance or a username string
     username = user.username if hasattr(user, 'username') else user
-    hash_code = hashlib.md5(unicode(username).encode('utf-8')).hexdigest()
+    hash_code = hashlib.md5(str(username).encode()).hexdigest()
 
     url = base_url + '?'
diff --git a/website/identifiers/clients/crossref.py b/website/identifiers/clients/crossref.py
index da8bc00bb59..738baee4280 100644
--- a/website/identifiers/clients/crossref.py
+++ b/website/identifiers/clients/crossref.py
@@ -191,8 +191,8 @@ def _crossref_format_contributors(self, element, preprint):
             if name_parts.get('suffix'):
                 person.append(element.suffix(remove_control_characters(name_parts['suffix'])))
             if contributor.external_identity.get('ORCID'):
-                orcid = contributor.external_identity['ORCID'].keys()[0]
-                verified = contributor.external_identity['ORCID'].values()[0] == 'VERIFIED'
+                orcid = list(contributor.external_identity['ORCID'].keys())[0]
+                verified = list(contributor.external_identity['ORCID'].values())[0] == 'VERIFIED'
                 if orcid and verified:
                     person.append(
                         element.ORCID('https://orcid.org/{}'.format(orcid), authenticated='true')
diff --git a/website/mailchimp_utils.py b/website/mailchimp_utils.py
index a44b3316169..ec9d8844d1f 100644
--- a/website/mailchimp_utils.py
+++ b/website/mailchimp_utils.py
@@ -56,7 +56,7 @@ def subscribe_mailchimp(list_name, user_id):
     except (mailchimp.ValidationError, mailchimp.ListInvalidBounceMemberError) as error:
         sentry.log_exception()
-        sentry.log_message(error.message)
+        sentry.log_message(error)
         user.mailchimp_mailing_lists[list_name] = False
     else:
         user.mailchimp_mailing_lists[list_name] = True
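Note: the gravatar change works because hashlib in Python 3 only accepts bytes, and str.encode() defaults to UTF-8, so str(username).encode() is equivalent to the old unicode(username).encode('utf-8'). For example:

    import hashlib

    username = 'user@osf.io'
    # hashlib.md5(username) raises TypeError in Py3: text must be encoded first.
    hash_code = hashlib.md5(username.encode()).hexdigest()  # encode() defaults to UTF-8
    print(hash_code)  # deterministic hex digest of the UTF-8 bytes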
diff --git a/website/notifications/tasks.py b/website/notifications/tasks.py
index e3b614487e8..881a81be508 100644
--- a/website/notifications/tasks.py
+++ b/website/notifications/tasks.py
@@ -41,7 +41,7 @@ def _send_global_and_node_emails(send_type):
         if sorted_messages:
             if not user.is_disabled:
                 # If there's only one node in digest we can show it's preferences link in the template.
-                notification_nodes = sorted_messages['children'].keys()
+                notification_nodes = list(sorted_messages['children'].keys())
                 node = AbstractNode.load(notification_nodes[0]) if len(
                     notification_nodes) == 1 else None
                 mails.send_mail(
diff --git a/website/profile/views.py b/website/profile/views.py
index 7b4e4eab7fc..77943bdb304 100644
--- a/website/profile/views.py
+++ b/website/profile/views.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 import logging
 from rest_framework import status as http_status
-from dateutil.parser import parse as parse_date
 from django.utils import timezone
 from django.core.exceptions import ValidationError
@@ -43,14 +42,6 @@
 logger = logging.getLogger(__name__)
 
 
-def date_or_none(date):
-    try:
-        return parse_date(date)
-    except Exception as error:
-        logger.exception(error)
-        return None
-
-
 def validate_user(data, user):
     """Check if the user in request is the user who log in """
     if 'id' in data:
@@ -372,7 +363,7 @@ def user_addons(auth, **kwargs):
 def user_notifications(auth, **kwargs):
     """Get subscribe data from user"""
     return {
-        'mailing_lists': dict(auth.user.mailchimp_mailing_lists.items() + auth.user.osf_mailing_lists.items())
+        'mailing_lists': dict(list(auth.user.mailchimp_mailing_lists.items()) + list(auth.user.osf_mailing_lists.items()))
     }
 
 @must_be_logged_in
@@ -610,18 +601,6 @@ def get_target_user(auth, uid=None):
     return target
 
 
-def fmt_date_or_none(date, fmt='%Y-%m-%d'):
-    if date:
-        try:
-            return date.strftime(fmt)
-        except ValueError:
-            raise HTTPError(
-                http_status.HTTP_400_BAD_REQUEST,
-                data=dict(message_long='Year entered must be after 1900')
-            )
-    return None
-
-
 def append_editable(data, auth, uid=None):
     target = get_target_user(auth, uid)
     data['editable'] = auth.user == target
@@ -813,6 +792,7 @@ def request_export(auth):
     user.save()
     return {'message': 'Sent account export request'}
 
+
 @must_be_logged_in
 def request_deactivation(auth):
     user = auth.user
@@ -820,6 +800,7 @@ def request_deactivation(auth):
     user.save()
     return {'message': 'Sent account deactivation request'}
 
+
 @must_be_logged_in
 def cancel_request_deactivation(auth):
     user = auth.user
diff --git a/website/project/licenses/__init__.py b/website/project/licenses/__init__.py
index d8ea754b12d..9fd2ca89e49 100644
--- a/website/project/licenses/__init__.py
+++ b/website/project/licenses/__init__.py
@@ -1,5 +1,5 @@
 from django.apps import apps
-from django.core.exceptions import ValidationError
+from rest_framework.exceptions import ValidationError
 
 from framework import exceptions as framework_exceptions
 from osf import exceptions as osf_exceptions
diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py
index d12d307f1b9..a893db05789 100644
--- a/website/project/views/contributor.py
+++ b/website/project/views/contributor.py
@@ -658,7 +658,7 @@ def verify_claim_token(user, token, pid):
 def check_external_auth(user):
     if user:
         return not user.has_usable_password() and (
-            'VERIFIED' in sum([each.values() for each in user.external_identity.values()], [])
+            'VERIFIED' in sum([list(each.values()) for each in user.external_identity.values()], [])
        )
     return False
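Note: the sum([...], []) in check_external_auth concatenates the per-provider value lists, and in Python 3 each dict.values() view must be wrapped in list() before + works on it. A small sketch with an illustrative external_identity shape (provider -> {id: status}), matching how the surrounding code indexes it:

    external_identity = {
        'ORCID': {'0000-0001-2345-6789': 'VERIFIED'},
        'CAS': {'jdoe': 'LINK'},
    }

    # list() each values() view so sum() can concatenate them with +.
    statuses = sum([list(each.values()) for each in external_identity.values()], [])
    assert 'VERIFIED' in statuses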
diff --git a/website/project/views/drafts.py b/website/project/views/drafts.py
index dd2118edf1b..e267f8b921d 100644
--- a/website/project/views/drafts.py
+++ b/website/project/views/drafts.py
@@ -39,7 +39,7 @@ def get_schema_or_fail(schema_name, schema_version):
     try:
         meta_schema = RegistrationSchema.objects.get(name=schema_name, schema_version=schema_version)
     except RegistrationSchema.DoesNotExist:
-        raise HTTPError(http_status.HTTP_200_OK, data=dict(
+        raise HTTPError(http_status.HTTP_404_NOT_FOUND, data=dict(
             message_long='No RegistrationSchema record matching that query could be found'
         ))
     return meta_schema
diff --git a/website/project/views/node.py b/website/project/views/node.py
index d5944de5f94..55b80e8203e 100644
--- a/website/project/views/node.py
+++ b/website/project/views/node.py
@@ -4,10 +4,10 @@
 from rest_framework import status as http_status
 import math
 from collections import defaultdict
-from itertools import islice
 
 from flask import request
 from django.apps import apps
+from django.utils import timezone
 from django.core.exceptions import ValidationError
 from django.db.models import Q, OuterRef, Subquery
 
@@ -78,7 +78,7 @@ def edit_node(auth, node, **kwargs):
         except ValidationError as e:
             raise HTTPError(
                 http_status.HTTP_400_BAD_REQUEST,
-                data=dict(message_long=e.message)
+                data=dict(message_long=str(e))
             )
         new_val = node.title
     elif edited_field == 'description':
@@ -92,7 +92,7 @@ def edit_node(auth, node, **kwargs):
         except ValidationError as e:
             raise HTTPError(
                 http_status.HTTP_400_BAD_REQUEST,
-                data=dict(message_long=e.message)
+                data=dict(message_long=str(e))
             )
     return {
         'status': 'success',
@@ -145,7 +145,7 @@ def project_new_post(auth, **kwargs):
     except ValidationError as e:
         raise HTTPError(
             http_status.HTTP_400_BAD_REQUEST,
-            data=dict(message_long=e.message)
+            data=dict(message_long=str(e))
         )
     new_project = _view_project(project, auth)
     return {
@@ -189,7 +189,7 @@ def project_new_node(auth, node, **kwargs):
     except ValidationError as e:
         raise HTTPError(
             http_status.HTTP_400_BAD_REQUEST,
-            data=dict(message_long=e.message)
+            data=dict(message_long=str(e))
         )
     redirect_url = node.url
     message = (
@@ -640,6 +640,7 @@ def remove_private_link(*args, **kwargs):
         raise HTTPError(http_status.HTTP_404_NOT_FOUND)
 
     link.is_deleted = True
+    link.deleted = timezone.now()
     link.save()
 
     for node in link.nodes.all():
@@ -832,7 +833,7 @@ def _view_project(node, auth, primary=False,
         'addon_widget_css': css,
         'node_categories': [
             {'value': key, 'display_name': value}
-            for key, value in settings.NODE_CATEGORY_MAP.items()
+            for key, value in list(settings.NODE_CATEGORY_MAP.items())
         ]
     }
@@ -1131,7 +1132,7 @@ def project_generate_private_link_post(auth, node, **kwargs):
     except ValidationError as e:
         raise HTTPError(
             http_status.HTTP_400_BAD_REQUEST,
-            data=dict(message_long=e.message)
+            data=dict(message_long=str(e))
         )
 
     return new_link
@@ -1230,7 +1231,7 @@ def search_node(auth, **kwargs):
     return {
         'nodes': [
             _serialize_node_search(each)
-            for each in islice(nodes, start, start + size)
+            for each in nodes[start: start + size]
             if each.contributors
         ],
         'total': count,
diff --git a/website/project/views/register.py b/website/project/views/register.py
index 3ce84e31a56..62de934efd5 100644
--- a/website/project/views/register.py
+++ b/website/project/views/register.py
@@ -203,7 +203,7 @@ def project_before_register(auth, node, **kwargs):
             messages[settings.ADDONS_ARCHIVABLE[name]]['addons'].add(addon.config.full_name)
         else:
             messages['none']['addons'].add(addon.config.full_name)
-    error_messages = errors.values()
+    error_messages = list(errors.values())
 
     prompts = [
         m['message'].format(util.conjunct(m['addons']))
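Note: replacing islice(nodes, start, start + size) with nodes[start:start + size] is more than a cleanup when nodes is a Django QuerySet: slicing is translated into SQL LIMIT/OFFSET, so only the requested page is fetched instead of iterating the cursor from the beginning. A hedged sketch, assuming a Django model Node with an is_public field (names are illustrative):

    # Assumes a Django model `Node`; model and field names are illustrative.
    start, size = 20, 10

    page = Node.objects.filter(is_public=True)[start:start + size]
    # Evaluates lazily to roughly:
    #   SELECT ... FROM node WHERE is_public LIMIT 10 OFFSET 20
    print(page.query)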
diff --git a/website/routes.py b/website/routes.py
index 039d1821f6f..b494af9385d 100644
--- a/website/routes.py
+++ b/website/routes.py
@@ -3,9 +3,7 @@
 import os
 from rest_framework import status as http_status
 import requests
-
 from future.moves.urllib.parse import urljoin
-
 import json
 import waffle
 
diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py
index 382aab78f47..d88e4a999f7 100644
--- a/website/search/elastic_search.py
+++ b/website/search/elastic_search.py
@@ -398,7 +398,7 @@ def serialize_node(node, category):
         normalized_title = six.u(node.title)
     except TypeError:
         normalized_title = node.title
-    normalized_title = unicodedata.normalize('NFKD', normalized_title).encode('ascii', 'ignore')
+    normalized_title = unicodedata.normalize('NFKD', normalized_title).encode('ascii', 'ignore').decode()
     elastic_document = {
         'id': node._id,
         'contributors': [
@@ -699,7 +699,7 @@ def update_user(user, index=None):
             val = six.u(val)
         except TypeError:
             pass  # This is fine, will only happen in 2.x if val is already unicode
-        normalized_names[key] = unicodedata.normalize('NFKD', val).encode('ascii', 'ignore')
+        normalized_names[key] = unicodedata.normalize('NFKD', val).encode('ascii', 'ignore').decode()
 
     user_doc = {
         'id': user._id,
diff --git a/website/search_migration/migrate.py b/website/search_migration/migrate.py
index bf28d1d8226..5bb1b9a2d08 100644
--- a/website/search_migration/migrate.py
+++ b/website/search_migration/migrate.py
@@ -241,7 +241,7 @@ def set_up_index(idx):
         es_client().indices.put_alias(index=index, name=idx)
     else:
         # Increment version
-        version = int(alias.keys()[0].split('_v')[1]) + 1
+        version = int(list(alias.keys())[0].split('_v')[1]) + 1
         logger.info('Incrementing index version to {}'.format(version))
         index = '{0}_v{1}'.format(idx, version)
         search.create_index(index=index)
diff --git a/website/settings/__init__.py b/website/settings/__init__.py
index d132dea6977..f6b4452bb80 100644
--- a/website/settings/__init__.py
+++ b/website/settings/__init__.py
@@ -11,7 +11,7 @@
 
 try:
     from .local import *  # noqa
-except ImportError as error:
+except ImportError:
     raise ImportError('No local.py settings file found. Did you remember to '
                       'copy local-dist.py to local.py?')
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index 9bfe700d72e..5539f594988 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -407,6 +407,8 @@ class CeleryConfig:
         'scripts.remove_after_use.end_prereg_challenge',
         'osf.management.commands.check_crossref_dois',
         'osf.management.commands.migrate_pagecounter_data',
+        'osf.management.commands.migrate_deleted_date',
+        'osf.management.commands.addon_deleted_date',
     }
 
     med_pri_modules = {
@@ -601,6 +603,14 @@ class CeleryConfig:
         #    'task': 'management.commands.migrate_pagecounter_data',
         #    'schedule': crontab(minute=0, hour=7),  # Daily 2:00 a.m.
         # },
+        # 'migrate_deleted_date': {
+        #    'task': 'management.commands.migrate_deleted_date',
+        #    'schedule': crontab(minute=0, hour=3),
+        # },
+        # 'addon_deleted_date': {
+        #    'task': 'management.commands.addon_deleted_date',
+        #    'schedule': crontab(minute=0, hour=3),  # Daily 11:00 p.m.
+        # },
         'generate_sitemap': {
             'task': 'scripts.generate_sitemap',
             'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
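Note: the trailing .decode() in the elastic_search hunks matters because in Python 3 str.encode('ascii', 'ignore') yields bytes, while the Elasticsearch document fields need str; the NFKD decomposition splits accented characters so the ASCII filter keeps their base letters. For instance:

    import unicodedata

    title = 'Déjà vu résumé'
    ascii_title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore').decode()
    assert ascii_title == 'Deja vu resume'  # combining accents dropped, str restored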
diff --git a/website/templates/project/addons.mako b/website/templates/project/addons.mako
index 8b8d943fe13..745d93ff50a 100644
--- a/website/templates/project/addons.mako
+++ b/website/templates/project/addons.mako
@@ -164,7 +164,7 @@
                 ${ tpl | n }
 
-% for name, capabilities in addon_capabilities.iteritems():
+% for name, capabilities in addon_capabilities.items():
 % endfor
diff --git a/website/util/share.py b/website/util/share.py
index 7b97788ab15..defe2a41018 100644
--- a/website/util/share.py
+++ b/website/util/share.py
@@ -44,8 +44,8 @@ def format_user(user):
     person.attrs['identifiers'] = [GraphNode('agentidentifier', agent=person, uri='mailto:{}'.format(uri))
                                    for uri in user.emails.values_list('address', flat=True)]
     person.attrs['identifiers'].append(GraphNode('agentidentifier', agent=person, uri=user.absolute_url))
-    if user.external_identity.get('ORCID') and user.external_identity['ORCID'].values()[0] == 'VERIFIED':
-        person.attrs['identifiers'].append(GraphNode('agentidentifier', agent=person, uri=user.external_identity['ORCID'].keys()[0]))
+    if user.external_identity.get('ORCID') and list(user.external_identity['ORCID'].values())[0] == 'VERIFIED':
+        person.attrs['identifiers'].append(GraphNode('agentidentifier', agent=person, uri=list(user.external_identity['ORCID'].keys())[0]))
 
     if user.is_registered:
         person.attrs['identifiers'].append(GraphNode('agentidentifier', agent=person, uri=user.profile_image_url()))
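A closing note on the list(d.keys())[0] idiom used in the crossref and share changes: Python 2 returned plain lists from keys()/values(), while Python 3 returns views that do not support indexing. Wrapping in list() is the direct port; next(iter(d)) is an equivalent that avoids the copy, shown here as an alternative sketch (the ORCID mapping shape is illustrative):

    orcid_identity = {'0000-0001-2345-6789': 'VERIFIED'}  # illustrative shape

    # Py2: orcid_identity.keys()[0]; Py3 views are not indexable.
    orcid = list(orcid_identity.keys())[0]
    # Equivalent without building a list:
    assert orcid == next(iter(orcid_identity))
    assert next(iter(orcid_identity.values())) == 'VERIFIED'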