diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 710f0b86983..c0efd8b145e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,3 +22,11 @@ repos:
rev: v2.13.6
hooks:
- id: jshint
+
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.5.7
+ hooks:
+ - id: ruff
+ args: [ --fix ]
+ - id: ruff-format
+ args: ["--line-length=79"]
diff --git a/Dockerfile b/Dockerfile
index 189b0e998b9..15429b0980a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,6 +21,7 @@ RUN apk add --no-cache --virtual .run-deps \
libpq-dev \
libffi \
libev \
+ libev-dev \
libevent \
&& yarn global add bower \
&& mkdir -p /var/www \
diff --git a/addons/base/__init__.py b/addons/base/__init__.py
index 26219a8fee2..30fd8751cda 100644
--- a/addons/base/__init__.py
+++ b/addons/base/__init__.py
@@ -1,15 +1,16 @@
from django.db.models import options
-default_app_config = 'addons.base.apps.BaseAddonAppConfig'
+
+default_app_config = "addons.base.apps.BaseAddonAppConfig"
# Patch to make abstractproperties overridable by djangofields
-if 'add_field' not in options.DEFAULT_NAMES:
- options.DEFAULT_NAMES += ('add_field', )
+if "add_field" not in options.DEFAULT_NAMES:
+ options.DEFAULT_NAMES += ("add_field",)
original_add_field = options.Options.add_field
def add_field_patched(self, field, **kwargs):
prop = getattr(field.model, field.name, None)
- if prop and getattr(prop, '__isabstractmethod__', None):
+ if prop and getattr(prop, "__isabstractmethod__", None):
setattr(field.model, field.name, None)
return original_add_field(field.model._meta, field, **kwargs)
diff --git a/addons/base/apps.py b/addons/base/apps.py
index 66038377f9a..256fd27287a 100644
--- a/addons/base/apps.py
+++ b/addons/base/apps.py
@@ -13,19 +13,20 @@
def _is_image(filename):
mtype, _ = mimetypes.guess_type(filename)
- return mtype and mtype.startswith('image')
+ return mtype and mtype.startswith("image")
+
NODE_SETTINGS_TEMPLATE_DEFAULT = os.path.join(
settings.TEMPLATES_PATH,
- 'project',
- 'addon',
- 'node_settings_default.mako',
+ "project",
+ "addon",
+ "node_settings_default.mako",
)
USER_SETTINGS_TEMPLATE_DEFAULT = os.path.join(
settings.TEMPLATES_PATH,
- 'profile',
- 'user_settings_default.mako',
+ "profile",
+ "user_settings_default.mako",
)
@@ -42,16 +43,17 @@ def _root_folder(node_settings, auth, **kwargs):
permissions=auth,
nodeUrl=node.url,
nodeApiUrl=node.api_url,
- private_key=kwargs.get('view_only', None),
+ private_key=kwargs.get("view_only", None),
)
return [root]
- _root_folder.__name__ = f'{addon_short_name}_root_folder'
+
+ _root_folder.__name__ = f"{addon_short_name}_root_folder"
return _root_folder
class BaseAddonAppConfig(AppConfig):
- name = 'addons.base'
- label = 'addons_base'
+ name = "addons.base"
+ label = "addons_base"
actions = tuple()
user_settings = None
@@ -81,26 +83,20 @@ def __init__(self, *args, **kwargs):
paths.append(os.path.dirname(self.user_settings_template))
if self.node_settings_template:
paths.append(os.path.dirname(self.node_settings_template))
- template_dirs = list(
- {
- path
- for path in paths
- if os.path.exists(path)
- }
- )
+ template_dirs = list({path for path in paths if os.path.exists(path)})
if template_dirs:
self.template_lookup = TemplateLookup(
directories=template_dirs,
default_filters=[
- 'unicode', # default filter; must set explicitly when overriding
- 'temp_ampersand_fixer',
+ "unicode", # default filter; must set explicitly when overriding
+ "temp_ampersand_fixer",
# FIXME: Temporary workaround for data stored in wrong format in DB. Unescape it before it gets re-escaped by Markupsafe. See [#OSF-4432]
- 'h',
+ "h",
],
imports=[
- 'from website.util.sanitize import temp_ampersand_fixer',
+ "from website.util.sanitize import temp_ampersand_fixer",
# FIXME: Temporary workaround for data stored in wrong format in DB. Unescape it before it gets re-escaped by Markupsafe. See [#OSF-4432]
- ]
+ ],
)
else:
self.template_lookup = None
@@ -119,8 +115,8 @@ def icon(self):
try:
return self._icon
except Exception:
- static_path = os.path.join('addons', self.short_name, 'static')
- static_files = glob.glob(os.path.join(static_path, 'comicon.*'))
+ static_path = os.path.join("addons", self.short_name, "static")
+ static_files = glob.glob(os.path.join(static_path, "comicon.*"))
image_files = [
os.path.split(filename)[1]
for filename in static_files
@@ -144,22 +140,24 @@ def _static_url(self, filename):
:return str: Static URL for file
"""
- if filename.startswith('/'):
+ if filename.startswith("/"):
return filename
- return '/static/addons/{addon}/{filename}'.format(
+ return "/static/addons/{addon}/{filename}".format(
addon=self.short_name,
filename=filename,
)
def to_json(self):
return {
- 'short_name': self.short_name,
- 'full_name': self.full_name,
- 'capabilities': self.short_name in settings.ADDON_CAPABILITIES,
- 'addon_capabilities': settings.ADDON_CAPABILITIES.get(self.short_name),
- 'icon': self.icon_url,
- 'has_page': 'page' in self.views,
- 'has_widget': 'widget' in self.views,
+ "short_name": self.short_name,
+ "full_name": self.full_name,
+ "capabilities": self.short_name in settings.ADDON_CAPABILITIES,
+ "addon_capabilities": settings.ADDON_CAPABILITIES.get(
+ self.short_name
+ ),
+ "icon": self.icon_url,
+ "has_page": "page" in self.views,
+ "has_widget": "widget" in self.views,
}
# Override Appconfig
diff --git a/addons/base/exceptions.py b/addons/base/exceptions.py
index 3edda785149..37a85a37d83 100644
--- a/addons/base/exceptions.py
+++ b/addons/base/exceptions.py
@@ -24,4 +24,5 @@ class DoesNotExist(AddonError):
class NotApplicableError(AddonError):
"""This exception is used by non-storage and/or non-oauth add-ons when they don't need or have certain features."""
+
pass
diff --git a/addons/base/generic_views.py b/addons/base/generic_views.py
index d7eb4da6ced..edaac454ea7 100644
--- a/addons/base/generic_views.py
+++ b/addons/base/generic_views.py
@@ -15,17 +15,18 @@
def import_auth(addon_short_name, Serializer):
- @must_have_addon(addon_short_name, 'user')
- @must_have_addon(addon_short_name, 'node')
+ @must_have_addon(addon_short_name, "user")
+ @must_have_addon(addon_short_name, "node")
@must_have_permission(permissions.WRITE)
def _import_auth(auth, node_addon, user_addon, **kwargs):
- """Import add-on credentials from the currently logged-in user to a node.
- """
+ """Import add-on credentials from the currently logged-in user to a node."""
external_account = ExternalAccount.load(
- request.json['external_account_id']
+ request.json["external_account_id"]
)
- if not user_addon.external_accounts.filter(id=external_account.id).exists():
+ if not user_addon.external_accounts.filter(
+ id=external_account.id
+ ).exists():
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
try:
@@ -36,10 +37,11 @@ def _import_auth(auth, node_addon, user_addon, **kwargs):
node_addon.save()
return {
- 'result': Serializer().serialize_settings(node_addon, auth.user),
- 'message': 'Successfully imported credentials from profile.',
+ "result": Serializer().serialize_settings(node_addon, auth.user),
+ "message": "Successfully imported credentials from profile.",
}
- _import_auth.__name__ = f'{addon_short_name}_import_auth'
+
+ _import_auth.__name__ = f"{addon_short_name}_import_auth"
return _import_auth
@@ -49,74 +51,80 @@ def _account_list(auth):
user_settings = auth.user.get_addon(addon_short_name)
serializer = Serializer(user_settings=user_settings)
return serializer.serialized_user_settings
- _account_list.__name__ = f'{addon_short_name}_account_list'
+
+ _account_list.__name__ = f"{addon_short_name}_account_list"
return _account_list
def folder_list(addon_short_name, addon_full_name, get_folders):
# TODO [OSF-6678]: Generalize this for API use after node settings have been refactored
- @must_have_addon(addon_short_name, 'node')
+ @must_have_addon(addon_short_name, "node")
@must_be_addon_authorizer(addon_short_name)
def _folder_list(node_addon, **kwargs):
"""Returns a list of folders"""
if not node_addon.has_auth:
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
- folder_id = request.args.get('folderId')
+ folder_id = request.args.get("folderId")
return get_folders(node_addon, folder_id)
- _folder_list.__name__ = f'{addon_short_name}_folder_list'
+
+ _folder_list.__name__ = f"{addon_short_name}_folder_list"
return _folder_list
def get_config(addon_short_name, Serializer):
@must_be_logged_in
- @must_have_addon(addon_short_name, 'node')
+ @must_have_addon(addon_short_name, "node")
@must_be_valid_project
@must_have_permission(permissions.WRITE)
def _get_config(node_addon, auth, **kwargs):
"""API that returns the serialized node settings."""
return {
- 'result': Serializer().serialize_settings(
- node_addon,
- auth.user
- )
+ "result": Serializer().serialize_settings(node_addon, auth.user)
}
- _get_config.__name__ = f'{addon_short_name}_get_config'
+
+ _get_config.__name__ = f"{addon_short_name}_get_config"
return _get_config
def set_config(addon_short_name, addon_full_name, Serializer, set_folder):
@must_not_be_registration
- @must_have_addon(addon_short_name, 'user')
- @must_have_addon(addon_short_name, 'node')
+ @must_have_addon(addon_short_name, "user")
+ @must_have_addon(addon_short_name, "node")
@must_be_addon_authorizer(addon_short_name)
@must_have_permission(permissions.WRITE)
def _set_config(node_addon, user_addon, auth, **kwargs):
"""View for changing a node's linked folder."""
- folder = request.json.get('selected')
+ folder = request.json.get("selected")
set_folder(node_addon, folder, auth)
path = node_addon.folder_path
return {
- 'result': {
- 'folder': {
- 'name': path.replace('All Files', '') if path != '/' else f'/ (Full {addon_full_name})',
- 'path': path,
+ "result": {
+ "folder": {
+ "name": path.replace("All Files", "")
+ if path != "/"
+ else f"/ (Full {addon_full_name})",
+ "path": path,
},
- 'urls': Serializer(node_settings=node_addon).addon_serialized_urls,
+ "urls": Serializer(
+ node_settings=node_addon
+ ).addon_serialized_urls,
},
- 'message': 'Successfully updated settings.',
+ "message": "Successfully updated settings.",
}
- _set_config.__name__ = f'{addon_short_name}_set_config'
+
+ _set_config.__name__ = f"{addon_short_name}_set_config"
return _set_config
def deauthorize_node(addon_short_name):
@must_not_be_registration
- @must_have_addon(addon_short_name, 'node')
+ @must_have_addon(addon_short_name, "node")
@must_have_permission(permissions.WRITE)
def _deauthorize_node(auth, node_addon, **kwargs):
node_addon.deauthorize(auth=auth)
node_addon.save()
- _deauthorize_node.__name__ = f'{addon_short_name}_deauthorize_node'
+
+ _deauthorize_node.__name__ = f"{addon_short_name}_deauthorize_node"
return _deauthorize_node
diff --git a/addons/base/logger.py b/addons/base/logger.py
index edb80e25098..cb27265a7f9 100644
--- a/addons/base/logger.py
+++ b/addons/base/logger.py
@@ -1,11 +1,13 @@
import abc
+
class AddonNodeLogger:
"""Helper class for adding correctly-formatted addon logs to nodes.
:param Node node: The node to add logs to
:param Auth auth: Authorization of the person who did the action.
"""
+
__metaclass__ = abc.ABCMeta
@property
@@ -14,13 +16,15 @@ def addon_short_name(self):
pass
def _log_params(self):
- node_settings = self.node.get_addon(self.addon_short_name, is_deleted=True)
+ node_settings = self.node.get_addon(
+ self.addon_short_name, is_deleted=True
+ )
return {
- 'project': self.node.parent_id,
- 'node': self.node._primary_key,
- 'folder_id': node_settings.folder_id,
- 'folder_name': node_settings.folder_name,
- 'folder': node_settings.folder_path
+ "project": self.node.parent_id,
+ "node": self.node._primary_key,
+ "folder_id": node_settings.folder_id,
+ "folder_name": node_settings.folder_name,
+ "folder": node_settings.folder_path,
}
def __init__(self, node, auth, path=None):
@@ -39,24 +43,30 @@ def log(self, action, extra=None, save=False):
params = self._log_params()
# If logging a file-related action, add the file's view and download URLs
if self.path:
- params.update({
- 'urls': {
- 'view': self.node.web_url_for('addon_view_or_download_file', path=self.path, provider=self.addon_short_name),
- 'download': self.node.web_url_for(
- 'addon_view_or_download_file',
- path=self.path,
- provider=self.addon_short_name
- )
- },
- 'path': self.path,
- })
+ params.update(
+ {
+ "urls": {
+ "view": self.node.web_url_for(
+ "addon_view_or_download_file",
+ path=self.path,
+ provider=self.addon_short_name,
+ ),
+ "download": self.node.web_url_for(
+ "addon_view_or_download_file",
+ path=self.path,
+ provider=self.addon_short_name,
+ ),
+ },
+ "path": self.path,
+ }
+ )
if extra:
params.update(extra)
self.node.add_log(
- action=f'{self.addon_short_name}_{action}',
+ action=f"{self.addon_short_name}_{action}",
params=params,
- auth=self.auth
+ auth=self.auth,
)
if save:
self.node.save()
diff --git a/addons/base/models.py b/addons/base/models.py
index 46b2203cbb6..32f0b2a5c50 100644
--- a/addons/base/models.py
+++ b/addons/base/models.py
@@ -21,21 +21,19 @@
from website.oauth.signals import oauth_complete
lookup = TemplateLookup(
- directories=[
- settings.TEMPLATES_PATH
- ],
+ directories=[settings.TEMPLATES_PATH],
default_filters=[
- 'unicode', # default filter; must set explicitly when overriding
+ "unicode", # default filter; must set explicitly when overriding
# FIXME: Temporary workaround for data stored in wrong format in DB. Unescape it before it
# gets re-escaped by Markupsafe. See [#OSF-4432]
- 'temp_ampersand_fixer',
- 'h',
+ "temp_ampersand_fixer",
+ "h",
],
imports=[
# FIXME: Temporary workaround for data stored in wrong format in DB. Unescape it before it
# gets re-escaped by Markupsafe. See [#OSF-4432]
- 'from website.util.sanitize import temp_ampersand_fixer',
- ]
+ "from website.util.sanitize import temp_ampersand_fixer",
+ ],
)
@@ -70,8 +68,8 @@ def undelete(self, save=True):
def to_json(self, user):
return {
- 'addon_short_name': self.config.short_name,
- 'addon_full_name': self.config.full_name,
+ "addon_short_name": self.config.short_name,
+ "addon_full_name": self.config.full_name,
}
#############
@@ -88,8 +86,13 @@ def on_delete(self):
class BaseUserSettings(BaseAddonSettings):
- owner = models.OneToOneField(OSFUser, related_name='%(app_label)s_user_settings',
- blank=True, null=True, on_delete=models.CASCADE)
+ owner = models.OneToOneField(
+ OSFUser,
+ related_name="%(app_label)s_user_settings",
+ blank=True,
+ null=True,
+ on_delete=models.CASCADE,
+ )
class Meta:
abstract = True
@@ -112,33 +115,42 @@ def nodes_authorized(self):
model = self.config.node_settings
if not model:
return []
- return [obj.owner for obj in model.objects.filter(user_settings=self, owner__is_deleted=False).select_related('owner')]
+ return [
+ obj.owner
+ for obj in model.objects.filter(
+ user_settings=self, owner__is_deleted=False
+ ).select_related("owner")
+ ]
@property
def can_be_merged(self):
- return hasattr(self, 'merge')
+ return hasattr(self, "merge")
def to_json(self, user):
ret = super().to_json(user)
- ret['has_auth'] = self.has_auth
- ret.update({
- 'nodes': [
- {
- '_id': node._id,
- 'url': node.url,
- 'title': node.title,
- 'registered': node.is_registration,
- 'api_url': node.api_url
- }
- for node in self.nodes_authorized
- ]
- })
+ ret["has_auth"] = self.has_auth
+ ret.update(
+ {
+ "nodes": [
+ {
+ "_id": node._id,
+ "url": node.url,
+ "title": node.title,
+ "registered": node.is_registration,
+ "api_url": node.api_url,
+ }
+ for node in self.nodes_authorized
+ ]
+ }
+ )
return ret
def __repr__(self):
if self.owner:
- return f'<{self.__class__.__name__} owned by user {self.owner._id}>'
- return f'<{self.__class__.__name__} with no owner>'
+ return (
+ f"<{self.__class__.__name__} owned by user {self.owner._id}>"
+ )
+ return f"<{self.__class__.__name__} with no owner>"
@oauth_complete.connect
@@ -181,17 +193,23 @@ def has_auth(self):
@property
def external_accounts(self):
"""The user's list of ``ExternalAccount`` instances for this provider"""
- return self.owner.external_accounts.filter(provider=self.oauth_provider.short_name)
+ return self.owner.external_accounts.filter(
+ provider=self.oauth_provider.short_name
+ )
def delete(self, save=True):
- for account in self.external_accounts.filter(provider=self.config.short_name):
+ for account in self.external_accounts.filter(
+ provider=self.config.short_name
+ ):
self.revoke_oauth_access(account, save=False)
super().delete(save=save)
def grant_oauth_access(self, node, external_account, metadata=None):
"""Give a node permission to use an ``ExternalAccount`` instance."""
# ensure the user owns the external_account
- if not self.owner.external_accounts.filter(id=external_account.id).exists():
+ if not self.owner.external_accounts.filter(
+ id=external_account.id
+ ).exists():
raise PermissionsError()
metadata = metadata or {}
@@ -220,13 +238,17 @@ def revoke_oauth_access(self, external_account, auth, save=True):
"""
for node in self.get_nodes_with_oauth_grants(external_account):
try:
- node.get_addon(external_account.provider, is_deleted=True).deauthorize(auth=auth)
+ node.get_addon(
+ external_account.provider, is_deleted=True
+ ).deauthorize(auth=auth)
except AttributeError:
# No associated addon settings despite oauth grant
pass
- if external_account.osfuser_set.count() == 1 and \
- external_account.osfuser_set.filter(id=auth.user.id).exists():
+ if (
+ external_account.osfuser_set.count() == 1
+ and external_account.osfuser_set.filter(id=auth.user.id).exists()
+ ):
# Only this user is using the account, so revoke remote access as well.
self.revoke_remote_oauth_access(external_account)
@@ -236,7 +258,7 @@ def revoke_oauth_access(self, external_account, auth, save=True):
self.save()
def revoke_remote_oauth_access(self, external_account):
- """ Makes outgoing request to remove the remote oauth grant
+ """Makes outgoing request to remove the remote oauth grant
stored by third-party provider.
Individual addons must override this method, as it is addon-specific behavior.
@@ -297,7 +319,7 @@ def get_attached_nodes(self, external_account):
def merge(self, user_settings):
"""Merge `user_settings` into this instance"""
if user_settings.__class__ is not self.__class__:
- raise TypeError('Cannot merge different addons')
+ raise TypeError("Cannot merge different addons")
for node_id, data in user_settings.oauth_grants.items():
if node_id not in self.oauth_grants:
@@ -319,18 +341,20 @@ def merge(self, user_settings):
config = settings.ADDONS_AVAILABLE_DICT[
self.oauth_provider.short_name
]
- Model = config.models['nodesettings']
+ Model = config.models["nodesettings"]
except KeyError:
pass
else:
- Model.objects.filter(user_settings=user_settings).update(user_settings=self)
+ Model.objects.filter(user_settings=user_settings).update(
+ user_settings=self
+ )
self.save()
def to_json(self, user):
ret = super().to_json(user)
- ret['accounts'] = self.serializer(
+ ret["accounts"] = self.serializer(
user_settings=self
).serialized_accounts
@@ -341,10 +365,11 @@ def to_json(self, user):
#############
def on_delete(self):
- """When the user deactivates the addon, clear auth for connected nodes.
- """
+ """When the user deactivates the addon, clear auth for connected nodes."""
super().on_delete()
- nodes = [AbstractNode.load(node_id) for node_id in self.oauth_grants.keys()]
+ nodes = [
+ AbstractNode.load(node_id) for node_id in self.oauth_grants.keys()
+ ]
for node in nodes:
node_addon = node.get_addon(self.oauth_provider.short_name)
if node_addon and node_addon.user_settings == self:
@@ -352,8 +377,13 @@ def on_delete(self):
class BaseNodeSettings(BaseAddonSettings):
- owner = models.OneToOneField(AbstractNode, related_name='%(app_label)s_node_settings',
- null=True, blank=True, on_delete=models.CASCADE)
+ owner = models.OneToOneField(
+ AbstractNode,
+ related_name="%(app_label)s_node_settings",
+ null=True,
+ blank=True,
+ on_delete=models.CASCADE,
+ )
class Meta:
abstract = True
@@ -379,18 +409,20 @@ def has_auth(self):
def to_json(self, user):
ret = super().to_json(user)
- ret.update({
- 'user': {
- 'permissions': self.owner.get_permissions(user)
- },
- 'node': {
- 'id': self.owner._id,
- 'api_url': self.owner.api_url,
- 'url': self.owner.url,
- 'is_registration': self.owner.is_registration,
- },
- 'node_settings_template': os.path.basename(self.config.node_settings_template),
- })
+ ret.update(
+ {
+ "user": {"permissions": self.owner.get_permissions(user)},
+ "node": {
+ "id": self.owner._id,
+ "api_url": self.owner.api_url,
+ "url": self.owner.url,
+ "is_registration": self.owner.is_registration,
+ },
+ "node_settings_template": os.path.basename(
+ self.config.node_settings_template
+ ),
+ }
+ )
return ret
#############
@@ -455,30 +487,30 @@ def before_fork(self, node, user):
:returns Alert message
"""
- if hasattr(self, 'user_settings'):
+ if hasattr(self, "user_settings"):
if self.user_settings is None:
return (
- f'Because you have not configured the {self.config.full_name} '
- 'add-on, your authentication will not be transferred to the forked '
- f'{node.project_or_component}. You may authorize and configure the '
- f'{self.config.full_name} add-on in the new fork on the settings '
- 'page.'
+ f"Because you have not configured the {self.config.full_name} "
+ "add-on, your authentication will not be transferred to the forked "
+ f"{node.project_or_component}. You may authorize and configure the "
+ f"{self.config.full_name} add-on in the new fork on the settings "
+ "page."
)
elif self.user_settings and self.user_settings.owner == user:
return (
- f'Because you have authorized the {self.config.full_name} add-on '
- f'for this {node.project_or_component}, forking it will also '
- 'transfer your authentication to the forked '
- f'{node.project_or_component}.'
+ f"Because you have authorized the {self.config.full_name} add-on "
+ f"for this {node.project_or_component}, forking it will also "
+ "transfer your authentication to the forked "
+ f"{node.project_or_component}."
)
else:
return (
- f'Because the {self.config.full_name} add-on has been authorized '
- 'by a different user, forking it will not transfer authentication '
- f'to the forked {node.project_or_component}. You may authorize and '
- f'configure the {self.config.full_name} add-on in the new fork on '
- 'the settings page.'
+ f"Because the {self.config.full_name} add-on has been authorized "
+ "by a different user, forking it will not transfer authentication "
+ f"to the forked {node.project_or_component}. You may authorize and "
+ f"configure the {self.config.full_name} add-on in the new fork on "
+ "the settings page."
)
def after_fork(self, node, fork, user, save=True):
@@ -536,9 +568,10 @@ def after_delete(self, user):
# Archiver #
############
+
class GenericRootNode:
- path = '/'
- name = ''
+ path = "/"
+ name = ""
class BaseStorageAddon:
@@ -553,66 +586,72 @@ class Meta:
@property
def archive_folder_name(self):
- name = f'Archive of {self.config.full_name}'
- folder_name = getattr(self, 'folder_name', '').lstrip('/').strip()
+ name = f"Archive of {self.config.full_name}"
+ folder_name = getattr(self, "folder_name", "").lstrip("/").strip()
if folder_name:
- name = name + f': {folder_name}'
+ name = name + f": {folder_name}"
return name
- def _get_fileobj_child_metadata(self, filenode, user, cookie=None, version=None):
+ def _get_fileobj_child_metadata(
+ self, filenode, user, cookie=None, version=None
+ ):
from api.base.utils import waterbutler_api_url_for
kwargs = {}
if version:
- kwargs['version'] = version
+ kwargs["version"] = version
if cookie:
- kwargs['cookie'] = cookie
+ kwargs["cookie"] = cookie
elif user:
- kwargs['cookie'] = user.get_or_create_cookie().decode()
+ kwargs["cookie"] = user.get_or_create_cookie().decode()
metadata_url = waterbutler_api_url_for(
self.owner._id,
self.config.short_name,
- path=filenode.get('path', '/'),
+ path=filenode.get("path", "/"),
user=user,
view_only=True,
_internal=True,
base_url=self.owner.osfstorage_region.waterbutler_url,
- **kwargs
+ **kwargs,
)
res = requests.get(metadata_url)
if res.status_code != 200:
- raise HTTPError(res.status_code, data={'error': res.json()})
+ raise HTTPError(res.status_code, data={"error": res.json()})
# TODO: better throttling?
time.sleep(1.0 / 5.0)
- data = res.json().get('data', None)
+ data = res.json().get("data", None)
if data:
- return [child['attributes'] for child in data]
+ return [child["attributes"] for child in data]
return []
- def _get_file_tree(self, filenode=None, user=None, cookie=None, version=None):
+ def _get_file_tree(
+ self, filenode=None, user=None, cookie=None, version=None
+ ):
"""
Recursively get file metadata
"""
filenode = filenode or {
- 'path': '/',
- 'kind': 'folder',
- 'name': self.root_node.name,
+ "path": "/",
+ "kind": "folder",
+ "name": self.root_node.name,
}
- if filenode.get('kind') == 'file':
+ if filenode.get("kind") == "file":
return filenode
kwargs = {
- 'version': version,
- 'cookie': cookie,
+ "version": version,
+ "cookie": cookie,
}
- filenode['children'] = [
+ filenode["children"] = [
self._get_file_tree(child, user, cookie=cookie)
- for child in self._get_fileobj_child_metadata(filenode, user, **kwargs)
+ for child in self._get_fileobj_child_metadata(
+ filenode, user, **kwargs
+ )
]
return filenode
@@ -620,9 +659,13 @@ def _get_file_tree(self, filenode=None, user=None, cookie=None, version=None):
class BaseOAuthNodeSettings(BaseNodeSettings):
# TODO: Validate this field to be sure it matches the provider's short_name
# NOTE: Do not set this field directly. Use ``set_auth()``
- external_account = models.ForeignKey(ExternalAccount, null=True, blank=True,
- related_name='%(app_label)s_node_settings',
- on_delete=models.CASCADE)
+ external_account = models.ForeignKey(
+ ExternalAccount,
+ null=True,
+ blank=True,
+ related_name="%(app_label)s_node_settings",
+ on_delete=models.CASCADE,
+ )
# NOTE: Do not set this field directly. Use ``set_auth()``
# user_settings = fields.AbstractForeignField()
@@ -666,24 +709,21 @@ def nodelogger(self):
auth = Auth(self.user_settings.owner)
self._logger_class = getattr(
self,
- '_logger_class',
+ "_logger_class",
type(
- f'{self.config.short_name.capitalize()}NodeLogger',
+ f"{self.config.short_name.capitalize()}NodeLogger",
(logger.AddonNodeLogger,),
- {'addon_short_name': self.config.short_name}
- )
- )
- return self._logger_class(
- node=self.owner,
- auth=auth
+ {"addon_short_name": self.config.short_name},
+ ),
)
+ return self._logger_class(node=self.owner, auth=auth)
@property
def complete(self):
return bool(
- self.has_auth and
- self.external_account and
- self.user_settings.verify_oauth_access(
+ self.has_auth
+ and self.external_account
+ and self.user_settings.verify_oauth_access(
node=self.owner,
external_account=self.external_account,
)
@@ -692,8 +732,8 @@ def complete(self):
@property
def configured(self):
return bool(
- self.complete and
- (self.folder_id or self.folder_name or self.folder_path)
+ self.complete
+ and (self.folder_id or self.folder_name or self.folder_path)
)
@property
@@ -702,9 +742,9 @@ def has_auth(self):
return bool(
self.user_settings and self.user_settings.has_auth
) and bool(
- self.external_account and self.user_settings.verify_oauth_access(
- node=self.owner,
- external_account=self.external_account
+ self.external_account
+ and self.user_settings.verify_oauth_access(
+ node=self.owner, external_account=self.external_account
)
)
@@ -724,7 +764,7 @@ def set_auth(self, external_account, user, metadata=None, log=True):
user_settings.grant_oauth_access(
node=self.owner,
external_account=external_account,
- metadata=metadata # metadata can be passed in when forking
+ metadata=metadata, # metadata can be passed in when forking
)
user_settings.save()
@@ -733,7 +773,7 @@ def set_auth(self, external_account, user, metadata=None, log=True):
self.external_account = external_account
if log:
- self.nodelogger.log(action='node_authorized', save=True)
+ self.nodelogger.log(action="node_authorized", save=True)
self.save()
def deauthorize(self, auth=None, add_log=False):
@@ -760,9 +800,9 @@ def before_remove_contributor_message(self, node, removed):
"""
if self.has_auth and self.user_settings.owner == removed:
return (
- 'The {addon} add-on for this {category} is authenticated by {name}. '
- 'Removing this user will also remove write access to {addon} '
- 'unless another contributor re-authenticates the add-on.'
+ "The {addon} add-on for this {category} is authenticated by {name}. "
+ "Removing this user will also remove write access to {addon} "
+ "unless another contributor re-authenticates the add-on."
).format(
addon=self.config.full_name,
category=node.project_or_component,
@@ -777,23 +817,24 @@ def after_remove_contributor(self, node, removed, auth=None):
from owner.
"""
if self.user_settings and self.user_settings.owner == removed:
-
# Delete OAuth tokens
- self.user_settings.oauth_grants[self.owner._id].pop(self.external_account._id)
+ self.user_settings.oauth_grants[self.owner._id].pop(
+ self.external_account._id
+ )
self.user_settings.save()
self.clear_auth()
message = (
'Because the {addon} add-on for {category} "{title}" was authenticated '
- 'by {user}, authentication information has been deleted.'
+ "by {user}, authentication information has been deleted."
).format(
addon=self.config.full_name,
category=markupsafe.escape(node.category_display),
title=markupsafe.escape(node.title),
- user=markupsafe.escape(removed.fullname)
+ user=markupsafe.escape(removed.fullname),
)
if not auth or auth.user != removed:
- url = node.web_url_for('node_addons')
+ url = node.web_url_for("node_addons")
message += (
' You can re-authenticate on the <u><a href="{url}">add-ons</a></u> page.'
).format(url=url)
@@ -816,10 +857,14 @@ def after_fork(self, node, fork, user, save=True):
metadata = None
if self.complete:
try:
- metadata = self.user_settings.oauth_grants[node._id][self.external_account._id]
+ metadata = self.user_settings.oauth_grants[node._id][
+ self.external_account._id
+ ]
except (KeyError, AttributeError):
pass
- clone.set_auth(self.external_account, user, metadata=metadata, log=False)
+ clone.set_auth(
+ self.external_account, user, metadata=metadata, log=False
+ )
else:
clone.clear_settings()
if save:
@@ -832,9 +877,9 @@ def before_register_message(self, node, user):
"""
if self.has_auth:
return (
- 'The contents of {addon} add-ons cannot be registered at this time; '
- 'the {addon} add-on linked to this {category} will not be included '
- 'as part of this registration.'
+ "The contents of {addon} add-ons cannot be registered at this time; "
+ "the {addon} add-on linked to this {category} will not be included "
+ "as part of this registration."
).format(
addon=self.config.full_name,
category=node.project_or_component,
@@ -844,12 +889,16 @@ def before_register_message(self, node, user):
before_register = before_register_message
def serialize_waterbutler_credentials(self):
- raise NotImplementedError("BaseOAuthNodeSettings subclasses must implement a \
- 'serialize_waterbutler_credentials' method.")
+ raise NotImplementedError(
+ "BaseOAuthNodeSettings subclasses must implement a \
+ 'serialize_waterbutler_credentials' method."
+ )
def serialize_waterbutler_settings(self):
- raise NotImplementedError("BaseOAuthNodeSettings subclasses must implement a \
- 'serialize_waterbutler_settings' method.")
+ raise NotImplementedError(
+ "BaseOAuthNodeSettings subclasses must implement a \
+ 'serialize_waterbutler_settings' method."
+ )
class BaseCitationsNodeSettings(BaseOAuthNodeSettings):
@@ -878,11 +927,14 @@ def api(self):
@property
def complete(self):
"""Boolean indication of addon completeness"""
- return bool(self.has_auth and self.user_settings.verify_oauth_access(
- node=self.owner,
- external_account=self.external_account,
- metadata={'folder': self.list_id},
- ))
+ return bool(
+ self.has_auth
+ and self.user_settings.verify_oauth_access(
+ node=self.owner,
+ external_account=self.external_account,
+ metadata={"folder": self.list_id},
+ )
+ )
@property
def root_folder(self):
@@ -905,9 +957,9 @@ def folder_path(self):
def fetch_folder_name(self):
"""Returns a displayable folder name"""
if self.list_id is None:
- return ''
- elif self.list_id == 'ROOT':
- return 'All Documents'
+ return ""
+ elif self.list_id == "ROOT":
+ return "All Documents"
else:
return self._fetch_folder_name
@@ -930,10 +982,10 @@ def deauthorize(self, auth=None, add_log=True):
"""Remove user authorization from this node and log the event."""
if add_log:
self.owner.add_log(
- f'{self.provider_name}_node_deauthorized',
+ f"{self.provider_name}_node_deauthorized",
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
},
auth=auth,
)
diff --git a/addons/base/serializer.py b/addons/base/serializer.py
index e10fc5766d2..50cac053749 100644
--- a/addons/base/serializer.py
+++ b/addons/base/serializer.py
@@ -40,22 +40,23 @@ def credentials_owner(self):
@property
def serialized_node_settings(self):
result = {
- 'nodeHasAuth': self.node_settings.has_auth,
- 'userIsOwner': self.user_is_owner,
- 'urls': self.serialized_urls,
+ "nodeHasAuth": self.node_settings.has_auth,
+ "userIsOwner": self.user_is_owner,
+ "urls": self.serialized_urls,
}
if self.user_settings:
- result['userHasAuth'] = self.user_settings.has_auth
+ result["userHasAuth"] = self.user_settings.has_auth
else:
- result['userHasAuth'] = False
+ result["userHasAuth"] = False
if self.node_settings.has_auth:
owner = self.credentials_owner
if owner:
- result['urls']['owner'] = web_url_for('profile_view_id',
- uid=owner._primary_key)
- result['ownerName'] = owner.fullname
+ result["urls"]["owner"] = web_url_for(
+ "profile_view_id", uid=owner._primary_key
+ )
+ result["ownerName"] = owner.fullname
return result
@property
@@ -64,7 +65,6 @@ def serialized_user_settings(self):
class OAuthAddonSerializer(AddonSerializer):
-
@property
def credentials_owner(self):
return self.user_settings.owner if self.user_settings else None
@@ -76,8 +76,8 @@ def user_is_owner(self):
user_accounts = self.user_settings.external_accounts.all()
return bool(
- self.node_settings.has_auth and
- self.node_settings.external_account in user_accounts
+ self.node_settings.has_auth
+ and self.node_settings.external_account in user_accounts
)
@property
@@ -87,7 +87,7 @@ def serialized_urls(self):
for url in self.REQUIRED_URLS:
msg = f"addon_serialized_urls must include key '{url}'"
assert url in ret, msg
- ret.update({'settings': web_url_for('user_addons')})
+ ret.update({"settings": web_url_for("user_addons")})
return ret
@property
@@ -100,9 +100,9 @@ def serialized_accounts(self):
@property
def serialized_user_settings(self):
retval = super().serialized_user_settings
- retval['accounts'] = []
+ retval["accounts"] = []
if self.user_settings:
- retval['accounts'] = self.serialized_accounts
+ retval["accounts"] = self.serialized_accounts
return retval
@@ -110,13 +110,13 @@ def serialize_account(self, external_account):
if external_account is None:
return None
return {
- 'id': external_account._id,
- 'provider_id': external_account.provider_id,
- 'provider_name': external_account.provider_name,
- 'provider_short_name': external_account.provider,
- 'display_name': external_account.display_name,
- 'profile_url': external_account.profile_url,
- 'nodes': [
+ "id": external_account._id,
+ "provider_id": external_account.provider_id,
+ "provider_name": external_account.provider_name,
+ "provider_short_name": external_account.provider,
+ "display_name": external_account.display_name,
+ "profile_url": external_account.profile_url,
+ "nodes": [
self.serialize_granted_node(node)
for node in self.user_settings.get_attached_nodes(
external_account=external_account
@@ -126,24 +126,30 @@ def serialize_account(self, external_account):
@collect_auth
def serialize_granted_node(self, node, auth):
-
node_settings = node.get_addon(
self.user_settings.oauth_provider.short_name
)
serializer = node_settings.serializer(node_settings=node_settings)
urls = serializer.addon_serialized_urls
- urls['view'] = node.url
+ urls["view"] = node.url
return {
- 'id': node._id,
- 'title': node.title if node.can_view(auth) else None,
- 'urls': urls,
+ "id": node._id,
+ "title": node.title if node.can_view(auth) else None,
+ "urls": urls,
}
class StorageAddonSerializer(OAuthAddonSerializer):
-
- REQUIRED_URLS = ('auth', 'importAuth', 'folders', 'files', 'config', 'deauthorize', 'accounts')
+ REQUIRED_URLS = (
+ "auth",
+ "importAuth",
+ "folders",
+ "files",
+ "config",
+ "deauthorize",
+ "accounts",
+ )
@abc.abstractmethod
def credentials_are_valid(self, user_settings, client=None):
@@ -157,55 +163,65 @@ def serialize_settings(self, node_settings, current_user, client=None):
user_settings = node_settings.user_settings
self.node_settings = node_settings
current_user_settings = current_user.get_addon(self.addon_short_name)
- user_is_owner = user_settings is not None and user_settings.owner == current_user
+ user_is_owner = (
+ user_settings is not None and user_settings.owner == current_user
+ )
- valid_credentials = self.credentials_are_valid(user_settings, client=client)
+ valid_credentials = self.credentials_are_valid(
+ user_settings, client=client
+ )
result = {
- 'userIsOwner': user_is_owner,
- 'nodeHasAuth': node_settings.has_auth,
- 'urls': self.serialized_urls,
- 'validCredentials': valid_credentials,
- 'userHasAuth': current_user_settings is not None and current_user_settings.has_auth,
+ "userIsOwner": user_is_owner,
+ "nodeHasAuth": node_settings.has_auth,
+ "urls": self.serialized_urls,
+ "validCredentials": valid_credentials,
+ "userHasAuth": current_user_settings is not None
+ and current_user_settings.has_auth,
}
if node_settings.has_auth:
# Add owner's profile URL
- result['urls']['owner'] = web_url_for(
- 'profile_view_id',
- uid=user_settings.owner._id
+ result["urls"]["owner"] = web_url_for(
+ "profile_view_id", uid=user_settings.owner._id
)
- result['ownerName'] = user_settings.owner.fullname
+ result["ownerName"] = user_settings.owner.fullname
# Show available folders
if node_settings.folder_id is None:
- result['folder'] = {'name': None, 'path': None}
+ result["folder"] = {"name": None, "path": None}
elif valid_credentials:
- result['folder'] = self.serialized_folder(node_settings)
+ result["folder"] = self.serialized_folder(node_settings)
return result
class CitationsAddonSerializer(OAuthAddonSerializer):
-
- REQUIRED_URLS = ('importAuth', 'folders', 'config', 'deauthorize', 'accounts')
+ REQUIRED_URLS = (
+ "importAuth",
+ "folders",
+ "config",
+ "deauthorize",
+ "accounts",
+ )
serialized_root_folder = {
- 'name': 'All Documents',
- 'provider_list_id': None,
- 'id': 'ROOT',
- 'parent_list_id': '__',
- 'kind': 'folder',
+ "name": "All Documents",
+ "provider_list_id": None,
+ "id": "ROOT",
+ "parent_list_id": "__",
+ "kind": "folder",
}
@property
def serialized_urls(self):
external_account = self.node_settings.external_account
ret = {
- 'auth': api_url_for('oauth_connect',
- service_name=self.addon_short_name),
- 'files': self.node_settings.owner.url,
+ "auth": api_url_for(
+ "oauth_connect", service_name=self.addon_short_name
+ ),
+ "files": self.node_settings.owner.url,
}
if external_account and external_account.profile_url:
- ret['owner'] = external_account.profile_url
+ ret["owner"] = external_account.profile_url
ret.update(super().serialized_urls)
return ret
@@ -213,9 +229,7 @@ def serialized_urls(self):
@property
def serialized_node_settings(self):
result = super().serialized_node_settings
- result['folder'] = {
- 'name': self.node_settings.fetch_folder_name
- }
+ result["folder"] = {"name": self.node_settings.fetch_folder_name}
return result
@property
@@ -224,14 +238,14 @@ def credentials_owner(self):
def serialize_folder(self, folder):
return {
- 'data': folder,
- 'kind': 'folder',
- 'name': folder['name'],
- 'id': folder['id'],
- 'urls': {
- 'fetch': self.node_settings.owner.api_url_for(
- f'{self.addon_short_name}_citation_list',
- list_id=folder['id']
+ "data": folder,
+ "kind": "folder",
+ "name": folder["name"],
+ "id": folder["id"],
+ "urls": {
+ "fetch": self.node_settings.owner.api_url_for(
+ f"{self.addon_short_name}_citation_list",
+ list_id=folder["id"],
),
},
}
@@ -240,16 +254,24 @@ def serialize_folder(self, folder):
def addon_serialized_urls(self):
node = self.node_settings.owner
return {
- 'importAuth': node.api_url_for(f'{self.addon_short_name}_import_auth'),
- 'folders': node.api_url_for(f'{self.addon_short_name}_citation_list'),
- 'config': node.api_url_for(f'{self.addon_short_name}_set_config'),
- 'deauthorize': node.api_url_for(f'{self.addon_short_name}_deauthorize_node'),
- 'accounts': node.api_url_for(f'{self.addon_short_name}_account_list'),
+ "importAuth": node.api_url_for(
+ f"{self.addon_short_name}_import_auth"
+ ),
+ "folders": node.api_url_for(
+ f"{self.addon_short_name}_citation_list"
+ ),
+ "config": node.api_url_for(f"{self.addon_short_name}_set_config"),
+ "deauthorize": node.api_url_for(
+ f"{self.addon_short_name}_deauthorize_node"
+ ),
+ "accounts": node.api_url_for(
+ f"{self.addon_short_name}_account_list"
+ ),
}
def serialize_citation(self, citation):
return {
- 'csl': citation,
- 'kind': 'file',
- 'id': citation['id'],
+ "csl": citation,
+ "kind": "file",
+ "id": citation["id"],
}
diff --git a/addons/base/signals.py b/addons/base/signals.py
index 2cb86aba118..4ae5eb6b150 100644
--- a/addons/base/signals.py
+++ b/addons/base/signals.py
@@ -2,6 +2,6 @@
import blinker
signals = blinker.Namespace()
-file_updated = signals.signal('file_updated')
-file_viewed = signals.signal('file_viewed')
-file_downloaded = signals.signal('file_downloaded')
+file_updated = signals.signal("file_updated")
+file_viewed = signals.signal("file_viewed")
+file_downloaded = signals.signal("file_downloaded")
diff --git a/addons/base/tests/base.py b/addons/base/tests/base.py
index 406cbe0a8a5..6b93b0b108e 100644
--- a/addons/base/tests/base.py
+++ b/addons/base/tests/base.py
@@ -24,11 +24,12 @@ class AddonTestCase:
- self.node_settings: AddonNodeSettings object for the addon
"""
+
DISABLE_OUTGOING_CONNECTIONS = True
- DB_NAME = getattr(django_settings, 'TEST_DB_ADDON_NAME', 'osf_addon')
+ DB_NAME = getattr(django_settings, "TEST_DB_ADDON_NAME", "osf_addon")
ADDON_SHORT_NAME = None
- OWNERS = ['user', 'node']
- NODE_USER_FIELD = 'user_settings'
+ OWNERS = ["user", "node"]
+ NODE_USER_FIELD = "user_settings"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -46,18 +47,23 @@ def create_project(self):
return ProjectFactory(creator=self.user)
def set_user_settings(self, settings):
- raise NotImplementedError('Must define set_user_settings(self, settings) method')
+ raise NotImplementedError(
+ "Must define set_user_settings(self, settings) method"
+ )
def set_node_settings(self, settings):
- raise NotImplementedError('Must define set_node_settings(self, settings) method')
+ raise NotImplementedError(
+ "Must define set_node_settings(self, settings) method"
+ )
def create_user_settings(self):
- """Initialize user settings object if requested by `self.OWNERS`.
- """
- if 'user' not in self.OWNERS:
+ """Initialize user settings object if requested by `self.OWNERS`."""
+ if "user" not in self.OWNERS:
return
self.user.add_addon(self.ADDON_SHORT_NAME)
- assert self.user.has_addon(self.ADDON_SHORT_NAME), f'{self.ADDON_SHORT_NAME} is not enabled'
+ assert self.user.has_addon(
+ self.ADDON_SHORT_NAME
+ ), f"{self.ADDON_SHORT_NAME} is not enabled"
self.user_settings = self.user.get_addon(self.ADDON_SHORT_NAME)
self.set_user_settings(self.user_settings)
self.user_settings.save()
@@ -66,23 +72,24 @@ def create_node_settings(self):
"""Initialize node settings object if requested by `self.OWNERS`,
additionally linking to user settings if requested by `self.NODE_USER_FIELD`.
"""
- if 'node' not in self.OWNERS:
+ if "node" not in self.OWNERS:
return
self.project.add_addon(self.ADDON_SHORT_NAME, auth=Auth(self.user))
self.node_settings = self.project.get_addon(self.ADDON_SHORT_NAME)
# User has imported their addon settings to this node
if self.NODE_USER_FIELD:
- setattr(self.node_settings, self.NODE_USER_FIELD, self.user_settings)
+ setattr(
+ self.node_settings, self.NODE_USER_FIELD, self.user_settings
+ )
self.set_node_settings(self.node_settings)
self.node_settings.save()
def setUp(self):
-
super().setUp()
self.user = self.create_user()
if not self.ADDON_SHORT_NAME:
- raise ValueError('Must define ADDON_SHORT_NAME in the test class.')
+ raise ValueError("Must define ADDON_SHORT_NAME in the test class.")
self.user.save()
self.project = self.create_project()
self.project.save()
@@ -91,7 +98,6 @@ def setUp(self):
class OAuthAddonTestCaseMixin:
-
@property
def ExternalAccountFactory(self):
raise NotImplementedError()
@@ -112,4 +118,6 @@ def set_user_settings(self, settings):
self.auth = Auth(self.user)
def set_node_settings(self, settings):
- self.user_settings.grant_oauth_access(self.project, self.external_account)
+ self.user_settings.grant_oauth_access(
+ self.project, self.external_account
+ )
diff --git a/addons/base/tests/logger.py b/addons/base/tests/logger.py
index 9ec65021b1a..bef27a8d4cb 100644
--- a/addons/base/tests/logger.py
+++ b/addons/base/tests/logger.py
@@ -3,8 +3,8 @@
from framework.auth import Auth
from osf_tests.factories import AuthUserFactory, ProjectFactory
-class AddonNodeLoggerTestSuiteMixinBase:
+class AddonNodeLoggerTestSuiteMixinBase:
__metaclass__ = abc.ABCMeta
@property
@@ -27,21 +27,24 @@ def setUp(self):
class StorageAddonNodeLoggerTestSuiteMixin(AddonNodeLoggerTestSuiteMixinBase):
-
def setUp(self):
super().setUp()
def test_log_file_added(self):
- self.logger.log('file_added', save=True)
+ self.logger.log("file_added", save=True)
last_log = self.node.logs.latest()
- assert last_log.action == '{}_{}'.format(self.addon_short_name, 'file_added')
+ assert last_log.action == "{}_{}".format(
+ self.addon_short_name, "file_added"
+ )
def test_log_file_removed(self):
- self.logger.log('file_removed', save=True)
+ self.logger.log("file_removed", save=True)
last_log = self.node.logs.latest()
- assert last_log.action == '{}_{}'.format(self.addon_short_name, 'file_removed')
+ assert last_log.action == "{}_{}".format(
+ self.addon_short_name, "file_removed"
+ )
def test_log_deauthorized_when_node_settings_are_deleted(self):
node_settings = self.node.get_addon(self.addon_short_name)
@@ -49,7 +52,7 @@ def test_log_deauthorized_when_node_settings_are_deleted(self):
# sanity check
assert node_settings.deleted
- self.logger.log(action='node_deauthorized', save=True)
+ self.logger.log(action="node_deauthorized", save=True)
last_log = self.node.logs.latest()
- assert last_log.action == f'{self.addon_short_name}_node_deauthorized'
+ assert last_log.action == f"{self.addon_short_name}_node_deauthorized"
diff --git a/addons/base/tests/models.py b/addons/base/tests/models.py
index c20f834cf24..752eca784ca 100644
--- a/addons/base/tests/models.py
+++ b/addons/base/tests/models.py
@@ -12,13 +12,11 @@
from osf_tests.factories import ProjectFactory, UserFactory
from tests.utils import mock_auth
from addons.base import exceptions
-from osf_tests.conftest import request_context
pytestmark = pytest.mark.django_db
class OAuthAddonModelTestSuiteMixinBase:
-
___metaclass__ = abc.ABCMeta
@property
@@ -38,7 +36,6 @@ def ExternalAccountFactory(self):
class OAuthAddonUserSettingTestSuiteMixin(OAuthAddonModelTestSuiteMixinBase):
-
def setUp(self):
self.node = ProjectFactory()
self.user = self.node.creator
@@ -59,10 +56,11 @@ def test_merge_user_settings(self):
other_account = self.ExternalAccountFactory()
other_user.external_accounts.add(other_account)
other_user_settings = other_user.get_or_add_addon(self.short_name)
- other_node_settings = other_node.get_or_add_addon(self.short_name, auth=Auth(other_user))
+ other_node_settings = other_node.get_or_add_addon(
+ self.short_name, auth=Auth(other_user)
+ )
other_node_settings.set_auth(
- user=other_user,
- external_account=other_account
+ user=other_user, external_account=other_account
)
assert other_node_settings.has_auth
@@ -86,19 +84,21 @@ def test_grant_oauth_access_no_metadata(self):
)
self.user_settings.save()
- assert self.user_settings.oauth_grants == {self.node._id: {self.external_account._id: {}}}
+ assert self.user_settings.oauth_grants == {
+ self.node._id: {self.external_account._id: {}}
+ }
def test_grant_oauth_access_metadata(self):
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
- metadata={'folder': 'fake_folder_id'}
+ metadata={"folder": "fake_folder_id"},
)
self.user_settings.save()
assert self.user_settings.oauth_grants == {
self.node._id: {
- self.external_account._id: {'folder': 'fake_folder_id'}
+ self.external_account._id: {"folder": "fake_folder_id"}
},
}
@@ -110,43 +110,43 @@ def test_verify_oauth_access_no_metadata(self):
self.user_settings.save()
assert self.user_settings.verify_oauth_access(
- node=self.node,
- external_account=self.external_account
- )
+ node=self.node, external_account=self.external_account
+ )
assert not self.user_settings.verify_oauth_access(
- node=self.node,
- external_account=self.ExternalAccountFactory()
- )
+ node=self.node, external_account=self.ExternalAccountFactory()
+ )
def test_verify_oauth_access_metadata(self):
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
- metadata={'folder': 'fake_folder_id'}
+ metadata={"folder": "fake_folder_id"},
)
self.user_settings.save()
assert self.user_settings.verify_oauth_access(
- node=self.node,
- external_account=self.external_account,
- metadata={'folder': 'fake_folder_id'}
- )
+ node=self.node,
+ external_account=self.external_account,
+ metadata={"folder": "fake_folder_id"},
+ )
assert not self.user_settings.verify_oauth_access(
- node=self.node,
- external_account=self.external_account,
- metadata={'folder': 'another_folder_id'}
- )
+ node=self.node,
+ external_account=self.external_account,
+ metadata={"folder": "another_folder_id"},
+ )
-class OAuthAddonNodeSettingsTestSuiteMixin(OAuthAddonModelTestSuiteMixinBase):
+class OAuthAddonNodeSettingsTestSuiteMixin(OAuthAddonModelTestSuiteMixinBase):
@pytest.fixture(autouse=True)
def _request_context(self, app):
- context = app.test_request_context(headers={
- 'Remote-Addr': '146.9.219.56',
- 'User-Agent': 'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:0.9.4.1) Gecko/20020518 Netscape6/6.2.3'
- })
+ context = app.test_request_context(
+ headers={
+ "Remote-Addr": "146.9.219.56",
+ "User-Agent": "Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:0.9.4.1) Gecko/20020518 Netscape6/6.2.3",
+ }
+ )
context.push()
yield context
context.pop()
@@ -168,9 +168,9 @@ def UserSettingsFactory(self):
def _node_settings_class_kwargs(self, node, user_settings):
return {
- 'user_settings': self.user_settings,
- 'folder_id': '1234567890',
- 'owner': self.node
+ "user_settings": self.user_settings,
+ "folder_id": "1234567890",
+ "owner": self.node,
}
def setUp(self):
@@ -186,13 +186,13 @@ def setUp(self):
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
- metadata={'folder': '1234567890'}
+ metadata={"folder": "1234567890"},
)
self.user_settings.save()
self.node_settings = self.NodeSettingsFactory(
external_account=self.external_account,
- **self._node_settings_class_kwargs(self.node, self.user_settings)
+ **self._node_settings_class_kwargs(self.node, self.user_settings),
)
@pytest.mark.django_db
@@ -220,8 +220,9 @@ def test_complete_has_auth_not_verified(self):
assert self.user_settings.oauth_grants == {self.node._id: {}}
def test_revoke_remote_access_called(self):
-
- with mock.patch.object(self.user_settings, 'revoke_remote_oauth_access') as mock_revoke:
+ with mock.patch.object(
+ self.user_settings, "revoke_remote_oauth_access"
+ ) as mock_revoke:
with mock_auth(self.user):
self.user_settings.revoke_oauth_access(self.external_account)
assert mock_revoke.call_count == 1
@@ -230,7 +231,9 @@ def test_revoke_remote_access_not_called(self):
user2 = UserFactory()
user2.external_accounts.add(self.external_account)
user2.save()
- with mock.patch.object(self.user_settings, 'revoke_remote_oauth_access') as mock_revoke:
+ with mock.patch.object(
+ self.user_settings, "revoke_remote_oauth_access"
+ ) as mock_revoke:
with mock_auth(self.user):
self.user_settings.revoke_oauth_access(self.external_account)
assert mock_revoke.call_count == 0
@@ -242,15 +245,19 @@ def test_complete_auth_false(self):
assert not self.node_settings.complete
def test_fields(self):
- node_settings = self.NodeSettingsClass(owner=ProjectFactory(), user_settings=self.user_settings)
+ node_settings = self.NodeSettingsClass(
+ owner=ProjectFactory(), user_settings=self.user_settings
+ )
node_settings.save()
assert node_settings.user_settings
assert node_settings.user_settings.owner == self.user
- assert hasattr(node_settings, 'folder_id')
- assert hasattr(node_settings, 'user_settings')
+ assert hasattr(node_settings, "folder_id")
+ assert hasattr(node_settings, "user_settings")
def test_folder_defaults_to_none(self):
- node_settings = self.NodeSettingsClass(user_settings=self.user_settings)
+ node_settings = self.NodeSettingsClass(
+ user_settings=self.user_settings
+ )
node_settings.save()
assert node_settings.folder_id is None
@@ -258,7 +265,9 @@ def test_has_auth(self):
self.user.external_accounts.clear()
self.user_settings.reload()
node = ProjectFactory()
- settings = self.NodeSettingsClass(user_settings=self.user_settings, owner=node)
+ settings = self.NodeSettingsClass(
+ user_settings=self.user_settings, owner=node
+ )
settings.save()
assert not settings.has_auth
@@ -291,14 +300,14 @@ def test_to_json(self):
settings = self.node_settings
user = UserFactory()
result = settings.to_json(user)
- assert result['addon_short_name'] == self.short_name
+ assert result["addon_short_name"] == self.short_name
def test_delete(self):
assert self.node_settings.user_settings
assert self.node_settings.folder_id
old_logs = list(self.node.logs.all())
mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc)
- with mock.patch.object(timezone, 'now', return_value=mock_now):
+ with mock.patch.object(timezone, "now", return_value=mock_now):
self.node_settings.delete()
self.node_settings.save()
assert self.node_settings.user_settings is None
@@ -308,9 +317,7 @@ def test_delete(self):
assert list(self.node.logs.all()) == list(old_logs)
def test_on_delete(self):
- self.user.delete_addon(
- self.user_settings.oauth_provider.short_name
- )
+ self.user.delete_addon(self.user_settings.oauth_provider.short_name)
self.node_settings.reload()
@@ -326,20 +333,20 @@ def test_deauthorize(self):
assert self.node_settings.folder_id is None
last_log = self.node.logs.first()
- assert last_log.action == f'{self.short_name}_node_deauthorized'
+ assert last_log.action == f"{self.short_name}_node_deauthorized"
params = last_log.params
- assert 'node' in params
- assert 'project' in params
+ assert "node" in params
+ assert "project" in params
def test_set_folder(self):
- folder_id = '1234567890'
+ folder_id = "1234567890"
self.node_settings.set_folder(folder_id, auth=Auth(self.user))
self.node_settings.save()
# Folder was set
assert self.node_settings.folder_id == folder_id
# Log was saved
last_log = self.node.logs.first()
- assert last_log.action == f'{self.short_name}_folder_selected'
+ assert last_log.action == f"{self.short_name}_folder_selected"
def test_set_user_auth(self):
node_settings = self.NodeSettingsFactory()
@@ -357,17 +364,17 @@ def test_set_user_auth(self):
assert node_settings.user_settings == user_settings
# A log was saved
last_log = node_settings.owner.logs.first()
- assert last_log.action == f'{self.short_name}_node_authorized'
+ assert last_log.action == f"{self.short_name}_node_authorized"
log_params = last_log.params
- assert log_params['node'] == node_settings.owner._id
+ assert log_params["node"] == node_settings.owner._id
assert last_log.user == user_settings.owner
def test_serialize_credentials(self):
- self.user_settings.external_accounts[0].oauth_key = 'key-11'
+ self.user_settings.external_accounts[0].oauth_key = "key-11"
self.user_settings.save()
credentials = self.node_settings.serialize_waterbutler_credentials()
- expected = {'token': self.node_settings.external_account.oauth_key}
+ expected = {"token": self.node_settings.external_account.oauth_key}
assert credentials == expected
def test_serialize_credentials_not_authorized(self):
@@ -378,7 +385,7 @@ def test_serialize_credentials_not_authorized(self):
def test_serialize_settings(self):
settings = self.node_settings.serialize_waterbutler_settings()
- expected = {'folder': self.node_settings.folder_id}
+ expected = {"folder": self.node_settings.folder_id}
assert settings == expected
def test_serialize_settings_not_configured(self):
@@ -388,18 +395,18 @@ def test_serialize_settings_not_configured(self):
self.node_settings.serialize_waterbutler_settings()
def test_create_log(self):
- action = 'file_added'
- path = 'pizza.nii'
+ action = "file_added"
+ path = "pizza.nii"
nlog = self.node.logs.count()
self.node_settings.create_waterbutler_log(
auth=Auth(user=self.user),
action=action,
- metadata={'path': path, 'materialized': path},
+ metadata={"path": path, "materialized": path},
)
self.node.reload()
assert self.node.logs.count() == nlog + 1
- assert self.node.logs.latest().action == f'{self.short_name}_{action}'
- assert self.node.logs.latest().params['path'] == path
+ assert self.node.logs.latest().action == f"{self.short_name}_{action}"
+ assert self.node.logs.latest().params["path"] == path
def test_after_fork_by_authorized_user(self):
fork = ProjectFactory()
@@ -412,34 +419,36 @@ def test_after_fork_by_unauthorized_user(self):
fork = ProjectFactory()
user = UserFactory()
clone = self.node_settings.after_fork(
- node=self.node, fork=fork, user=user,
- save=True
+ node=self.node, fork=fork, user=user, save=True
)
assert clone.user_settings is None
def test_before_remove_contributor_message(self):
message = self.node_settings.before_remove_contributor(
- self.node, self.user)
+ self.node, self.user
+ )
assert message
assert self.user.fullname in message
assert self.node.project_or_component in message
def test_after_remove_authorized_user_not_self(self):
message = self.node_settings.after_remove_contributor(
- self.node, self.user_settings.owner)
+ self.node, self.user_settings.owner
+ )
self.node_settings.save()
assert self.node_settings.user_settings is None
assert message
- assert 'You can re-authenticate' in message
+ assert "You can re-authenticate" in message
def test_after_remove_authorized_user_self(self):
auth = Auth(user=self.user_settings.owner)
message = self.node_settings.after_remove_contributor(
- self.node, self.user_settings.owner, auth)
+ self.node, self.user_settings.owner, auth
+ )
self.node_settings.save()
assert self.node_settings.user_settings is None
assert message
- assert 'You can re-authenticate' not in message
+ assert "You can re-authenticate" not in message
def test_after_delete(self):
self.node.remove_node(Auth(user=self.node.creator))
@@ -462,52 +471,59 @@ def OAuthProviderClass(self):
class OAuthCitationsNodeSettingsTestSuiteMixin(
- OAuthAddonNodeSettingsTestSuiteMixin,
- OAuthCitationsTestSuiteMixinBase):
-
+ OAuthAddonNodeSettingsTestSuiteMixin, OAuthCitationsTestSuiteMixinBase
+):
def setUp(self):
super().setUp()
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
- metadata={'folder': 'fake_folder_id'}
+ metadata={"folder": "fake_folder_id"},
)
self.user_settings.save()
def test_fetch_folder_name_root(self):
- self.node_settings.list_id = 'ROOT'
+ self.node_settings.list_id = "ROOT"
- assert self.node_settings.fetch_folder_name == 'All Documents'
+ assert self.node_settings.fetch_folder_name == "All Documents"
def test_selected_folder_name_empty(self):
self.node_settings.list_id = None
- assert self.node_settings.fetch_folder_name == ''
+ assert self.node_settings.fetch_folder_name == ""
def test_selected_folder_name(self):
# Mock the return from api call to get the folder's name
mock_folder = MockFolder()
name = None
- with mock.patch.object(self.OAuthProviderClass, '_folder_metadata', return_value=mock_folder):
+ with mock.patch.object(
+ self.OAuthProviderClass,
+ "_folder_metadata",
+ return_value=mock_folder,
+ ):
name = self.node_settings.fetch_folder_name
- assert name == 'Fake Folder'
+ assert name == "Fake Folder"
def test_api_not_cached(self):
# The first call to .api returns a new object
- with mock.patch.object(self.NodeSettingsClass, 'oauth_provider') as mock_api:
+ with mock.patch.object(
+ self.NodeSettingsClass, "oauth_provider"
+ ) as mock_api:
api = self.node_settings.api
mock_api.assert_called_once_with(account=self.external_account)
assert api == mock_api()
def test_api_cached(self):
# Repeated calls to .api returns the same object
- with mock.patch.object(self.NodeSettingsClass, 'oauth_provider') as mock_api:
- self.node_settings._api = 'testapi'
+ with mock.patch.object(
+ self.NodeSettingsClass, "oauth_provider"
+ ) as mock_api:
+ self.node_settings._api = "testapi"
api = self.node_settings.api
assert not mock_api.called
- assert api == 'testapi'
+ assert api == "testapi"
############# Overrides ##############
# `pass` due to lack of waterbutler- #
@@ -516,9 +532,9 @@ def test_api_cached(self):
def _node_settings_class_kwargs(self, node, user_settings):
return {
- 'user_settings': self.user_settings,
- 'list_id': 'fake_folder_id',
- 'owner': self.node
+ "user_settings": self.user_settings,
+ "list_id": "fake_folder_id",
+ "owner": self.node,
}
def test_serialize_credentials(self):
@@ -537,8 +553,8 @@ def test_create_log(self):
pass
def test_set_folder(self):
- folder_id = 'fake-folder-id'
- folder_name = 'fake-folder-name'
+ folder_id = "fake-folder-id"
+ folder_name = "fake-folder-name"
self.node_settings.clear_settings()
self.node_settings.save()
@@ -555,58 +571,70 @@ def test_set_folder(self):
)
# instance was updated
- assert self.node_settings.list_id == 'fake-folder-id'
+ assert self.node_settings.list_id == "fake-folder-id"
# user_settings was updated
# TODO: the call to grant_oauth_access should be mocked
assert self.user_settings.verify_oauth_access(
- node=self.node,
- external_account=self.external_account,
- metadata={'folder': 'fake-folder-id'}
- )
+ node=self.node,
+ external_account=self.external_account,
+ metadata={"folder": "fake-folder-id"},
+ )
log = self.node.logs.latest()
- assert log.action == f'{self.short_name}_folder_selected'
- assert log.params['folder_id'] == folder_id
- assert log.params['folder_name'] == folder_name
+ assert log.action == f"{self.short_name}_folder_selected"
+ assert log.params["folder_id"] == folder_id
+ assert log.params["folder_name"] == folder_name
- @mock.patch('framework.status.push_status_message')
+ @mock.patch("framework.status.push_status_message")
def test_remove_contributor_authorizer(self, mock_push_status):
contributor = UserFactory()
self.node.add_contributor(contributor, permissions=ADMIN)
- self.node.remove_contributor(self.node.creator, auth=Auth(user=contributor))
+ self.node.remove_contributor(
+ self.node.creator, auth=Auth(user=contributor)
+ )
self.node_settings.reload()
self.user_settings.reload()
assert not self.node_settings.has_auth
- assert not self.user_settings.verify_oauth_access(self.node, self.external_account)
+ assert not self.user_settings.verify_oauth_access(
+ self.node, self.external_account
+ )
def test_remove_contributor_not_authorizer(self):
contributor = UserFactory()
self.node.add_contributor(contributor)
- self.node.remove_contributor(contributor, auth=Auth(user=self.node.creator))
+ self.node.remove_contributor(
+ contributor, auth=Auth(user=self.node.creator)
+ )
assert self.node_settings.has_auth
- assert self.user_settings.verify_oauth_access(self.node, self.external_account)
+ assert self.user_settings.verify_oauth_access(
+ self.node, self.external_account
+ )
- @mock.patch('framework.status.push_status_message')
+ @mock.patch("framework.status.push_status_message")
def test_fork_by_authorizer(self, mock_push_status):
fork = self.node.fork_node(auth=Auth(user=self.node.creator))
self.user_settings.reload()
assert fork.get_addon(self.short_name).has_auth
- assert self.user_settings.verify_oauth_access(fork, self.external_account)
+ assert self.user_settings.verify_oauth_access(
+ fork, self.external_account
+ )
- @mock.patch('framework.status.push_status_message')
+ @mock.patch("framework.status.push_status_message")
def test_fork_not_by_authorizer(self, mock_push_status):
contributor = UserFactory()
self.node.add_contributor(contributor)
fork = self.node.fork_node(auth=Auth(user=contributor))
assert not fork.get_addon(self.short_name).has_auth
- assert not self.user_settings.verify_oauth_access(fork, self.external_account)
+ assert not self.user_settings.verify_oauth_access(
+ fork, self.external_account
+ )
-class CitationAddonProviderTestSuiteMixin(OAuthCitationsTestSuiteMixinBase):
+class CitationAddonProviderTestSuiteMixin(OAuthCitationsTestSuiteMixinBase):
@property
@abc.abstractmethod
def ApiExceptionClass(self):
@@ -630,13 +658,17 @@ def test_citation_lists(self):
self.provider._client = mock_client
mock_account = mock.Mock()
self.provider.account = mock_account
- res = self.provider.citation_lists(self.ProviderClass()._extract_folder)
- assert res[1]['name'] == mock_folders[0].name
- assert res[1]['id'] == mock_folders[0].json['id']
+ res = self.provider.citation_lists(
+ self.ProviderClass()._extract_folder
+ )
+ assert res[1]["name"] == mock_folders[0].name
+ assert res[1]["id"] == mock_folders[0].json["id"]
def test_client_not_cached(self):
# The first call to .client returns a new client
- with mock.patch.object(self.OAuthProviderClass, '_get_client') as mock_get_client:
+ with mock.patch.object(
+ self.OAuthProviderClass, "_get_client"
+ ) as mock_get_client:
mock_account = mock.Mock()
mock_account.expires_at = timezone.now()
self.provider.account = mock_account
@@ -646,20 +678,28 @@ def test_client_not_cached(self):
def test_client_cached(self):
# Repeated calls to .client returns the same client
- with mock.patch.object(self.OAuthProviderClass, '_get_client') as mock_get_client:
+ with mock.patch.object(
+ self.OAuthProviderClass, "_get_client"
+ ) as mock_get_client:
self.provider._client = mock.Mock()
res = self.provider.client
assert res == self.provider._client
assert not mock_get_client.called
def test_has_access(self):
- with mock.patch.object(self.OAuthProviderClass, '_get_client') as mock_get_client:
+ with mock.patch.object(
+ self.OAuthProviderClass, "_get_client"
+ ) as mock_get_client:
mock_client = mock.Mock()
mock_error = mock.PropertyMock()
mock_error.status_code = 403
- mock_error.text = 'Mocked 403 ApiException'
- mock_client.folders.list.side_effect = self.ApiExceptionClass(mock_error)
- mock_client.collections.side_effect = self.ApiExceptionClass(mock_error)
+ mock_error.text = "Mocked 403 ApiException"
+ mock_client.folders.list.side_effect = self.ApiExceptionClass(
+ mock_error
+ )
+ mock_client.collections.side_effect = self.ApiExceptionClass(
+ mock_error
+ )
mock_get_client.return_value = mock_client
with pytest.raises(HTTPError) as exc_info:
self.provider.client
diff --git a/addons/base/tests/serializers.py b/addons/base/tests/serializers.py
index 545761dcf12..7b7f7b16c0c 100644
--- a/addons/base/tests/serializers.py
+++ b/addons/base/tests/serializers.py
@@ -10,7 +10,6 @@
class AddonSerializerTestSuiteMixin:
-
__metaclass__ = abc.ABCMeta
@property
@@ -46,26 +45,37 @@ def setUp(self):
self.user = AuthUserFactory()
self.node = ProjectFactory(creator=self.user)
self.set_user_settings(self.user)
- assert getattr(self, 'user_settings') is not None, "'set_user_settings' should set the 'user_settings' attribute of the instance to an instance of \
+ assert (
+ getattr(self, "user_settings") is not None
+ ), (
+ "'set_user_settings' should set the 'user_settings' attribute of the instance to an instance of \
the appropriate user settings model."
+ )
self.set_node_settings(self.user_settings)
- assert getattr(self, 'node_settings') is not None, "'set_node_settings' should set the 'user_settings' attribute of the instance to an instance of \
+ assert (
+ getattr(self, "node_settings") is not None
+ ), (
+ "'set_node_settings' should set the 'user_settings' attribute of the instance to an instance of \
the appropriate node settings model."
+ )
self.ser = self.Serializer(
- user_settings=self.user_settings,
- node_settings=self.node_settings
+ user_settings=self.user_settings, node_settings=self.node_settings
)
def test_serialized_node_settings_unauthorized(self):
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=False):
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=False
+ ):
serialized = self.ser.serialized_node_settings
for setting in self.required_settings:
assert setting in serialized
def test_serialized_node_settings_authorized(self):
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=True):
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=True
+ ):
serialized = self.ser.serialized_node_settings
for setting in self.required_settings:
assert setting in serialized
@@ -74,7 +84,6 @@ def test_serialized_node_settings_authorized(self):
class OAuthAddonSerializerTestSuiteMixin(AddonSerializerTestSuiteMixin):
-
def set_user_settings(self, user):
self.user_settings = user.get_or_add_addon(self.addon_short_name)
self.external_account = self.ExternalAccountFactory()
@@ -82,8 +91,12 @@ def set_user_settings(self, user):
self.user.save()
def set_node_settings(self, user_settings):
- self.node_settings = self.node.get_or_add_addon(self.addon_short_name, auth=Auth(user_settings.owner))
- self.node_settings.set_auth(self.user_settings.external_accounts[0], self.user)
+ self.node_settings = self.node.get_or_add_addon(
+ self.addon_short_name, auth=Auth(user_settings.owner)
+ )
+ self.node_settings.set_auth(
+ self.user_settings.external_accounts[0], self.user
+ )
def test_credentials_owner(self):
owner = self.ser.credentials_owner
@@ -108,8 +121,10 @@ def test_user_is_owner_node_not_authorized_user_has_accounts(self):
def test_user_is_owner_node_authorized_user_is_not_owner(self):
self.node_settings.external_account = self.ExternalAccountFactory()
- with mock.patch('addons.base.models.BaseOAuthUserSettings.verify_oauth_access',
- return_value=True):
+ with mock.patch(
+ "addons.base.models.BaseOAuthUserSettings.verify_oauth_access",
+ return_value=True,
+ ):
self.user.external_accounts.clear()
assert not self.ser.user_is_owner
@@ -117,7 +132,7 @@ def test_user_is_owner_node_authorized_user_is_owner(self):
assert self.ser.user_is_owner
def test_serialized_urls_checks_required(self):
- with mock.patch.object(self.ser, 'REQUIRED_URLS', ('foobar', )):
+ with mock.patch.object(self.ser, "REQUIRED_URLS", ("foobar",)):
with pytest.raises(AssertionError):
self.ser.serialized_urls
@@ -125,7 +140,9 @@ def test_serialized_acccounts(self):
ea = self.ExternalAccountFactory()
self.user.external_accounts.add(ea)
- with mock.patch.object(type(self.ser), 'serialize_account') as mock_serialize_account:
+ with mock.patch.object(
+ type(self.ser), "serialize_account"
+ ) as mock_serialize_account:
mock_serialize_account.return_value = {}
serialized = self.ser.serialized_accounts
assert len(serialized) == self.user.external_accounts.count()
@@ -134,36 +151,39 @@ def test_serialized_acccounts(self):
def test_serialize_acccount(self):
ea = self.ExternalAccountFactory()
expected = {
- 'id': ea._id,
- 'provider_id': ea.provider_id,
- 'provider_name': ea.provider_name,
- 'provider_short_name': ea.provider,
- 'display_name': ea.display_name,
- 'profile_url': ea.profile_url,
- 'nodes': [],
+ "id": ea._id,
+ "provider_id": ea.provider_id,
+ "provider_name": ea.provider_name,
+ "provider_short_name": ea.provider,
+ "display_name": ea.display_name,
+ "profile_url": ea.profile_url,
+ "nodes": [],
}
assert self.ser.serialize_account(ea) == expected
def test_serialized_user_settings(self):
- with mock.patch.object(self.Serializer, 'serialized_accounts', return_value=[]):
+ with mock.patch.object(
+ self.Serializer, "serialized_accounts", return_value=[]
+ ):
serialized = self.ser.serialized_user_settings
- assert 'accounts' in serialized
+ assert "accounts" in serialized
def test_serialize_granted_node(self):
with mock_auth(self.user):
- serialized = self.ser.serialize_granted_node(self.node, auth=Auth(self.user))
- for key in ('id', 'title', 'urls'):
+ serialized = self.ser.serialize_granted_node(
+ self.node, auth=Auth(self.user)
+ )
+ for key in ("id", "title", "urls"):
assert key in serialized
- assert self.node._id == serialized['id']
- assert self.node.title == serialized['title']
- assert 'view' in serialized['urls']
- assert serialized['urls']['view'] == self.node.url
+ assert self.node._id == serialized["id"]
+ assert self.node.title == serialized["title"]
+ assert "view" in serialized["urls"]
+ assert serialized["urls"]["view"] == self.node.url
class StorageAddonSerializerTestSuiteMixin(OAuthAddonSerializerTestSuiteMixin):
-
- required_settings = ('userIsOwner', 'nodeHasAuth', 'urls', 'userHasAuth')
- required_settings_authorized = ('ownerName', )
+ required_settings = ("userIsOwner", "nodeHasAuth", "urls", "userHasAuth")
+ required_settings_authorized = ("ownerName",)
@property
@abc.abstractmethod
@@ -178,44 +198,63 @@ def set_provider_id(self):
pass
def test_serialize_settings_unauthorized(self):
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=False):
- serialized = self.ser.serialize_settings(self.node_settings, self.user, self.client)
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=False
+ ):
+ serialized = self.ser.serialize_settings(
+ self.node_settings, self.user, self.client
+ )
for key in self.required_settings:
assert key in serialized
def test_serialize_settings_authorized(self):
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=True):
- serialized = self.ser.serialize_settings(self.node_settings, self.user, self.client)
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=True
+ ):
+ serialized = self.ser.serialize_settings(
+ self.node_settings, self.user, self.client
+ )
for key in self.required_settings:
assert key in serialized
- assert 'owner' in serialized['urls']
- assert serialized['urls']['owner'] == web_url_for(
- 'profile_view_id',
- uid=self.user_settings.owner._id
+ assert "owner" in serialized["urls"]
+ assert serialized["urls"]["owner"] == web_url_for(
+ "profile_view_id", uid=self.user_settings.owner._id
)
- assert 'ownerName' in serialized
- assert serialized['ownerName'] == self.user_settings.owner.fullname
- assert 'folder' in serialized
+ assert "ownerName" in serialized
+ assert serialized["ownerName"] == self.user_settings.owner.fullname
+ assert "folder" in serialized
def test_serialize_settings_authorized_no_folder(self):
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=True):
- serialized = self.ser.serialize_settings(self.node_settings, self.user, self.client)
- assert 'folder' in serialized
- assert serialized['folder'] == {'name': None, 'path': None}
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=True
+ ):
+ serialized = self.ser.serialize_settings(
+ self.node_settings, self.user, self.client
+ )
+ assert "folder" in serialized
+ assert serialized["folder"] == {"name": None, "path": None}
def test_serialize_settings_authorized_folder_is_set(self):
- self.set_provider_id('foo')
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=True):
- with mock.patch.object(self.ser, 'serialized_folder') as mock_serialized_folder:
+ self.set_provider_id("foo")
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=True
+ ):
+ with mock.patch.object(
+ self.ser, "serialized_folder"
+ ) as mock_serialized_folder:
mock_serialized_folder.return_value = {}
- serialized = self.ser.serialize_settings(self.node_settings, self.user, self.client)
- assert 'folder' in serialized
+ serialized = self.ser.serialize_settings(
+ self.node_settings, self.user, self.client
+ )
+ assert "folder" in serialized
assert mock_serialized_folder.called
-class CitationAddonSerializerTestSuiteMixin(OAuthAddonSerializerTestSuiteMixin):
- required_settings = ('userIsOwner', 'nodeHasAuth', 'urls', 'userHasAuth')
- required_settings_authorized = ('ownerName', )
+class CitationAddonSerializerTestSuiteMixin(
+ OAuthAddonSerializerTestSuiteMixin
+):
+ required_settings = ("userIsOwner", "nodeHasAuth", "urls", "userHasAuth")
+ required_settings_authorized = ("ownerName",)
@property
@abc.abstractmethod
@@ -224,12 +263,12 @@ def folder(self):
def test_serialize_folder(self):
serialized_folder = self.ser.serialize_folder(self.folder)
- assert serialized_folder['id'] == self.folder['id']
- assert serialized_folder['name'] == self.folder.name
- assert serialized_folder['kind'] == 'folder'
+ assert serialized_folder["id"] == self.folder["id"]
+ assert serialized_folder["name"] == self.folder.name
+ assert serialized_folder["kind"] == "folder"
def test_serialize_citation(self):
serialized_citation = self.ser.serialize_citation(self.folder)
- assert serialized_citation['csl'] == self.folder
- assert serialized_citation['id'] == self.folder['id']
- assert serialized_citation['kind'] == 'file'
+ assert serialized_citation["csl"] == self.folder
+ assert serialized_citation["id"] == self.folder["id"]
+ assert serialized_citation["kind"] == "file"
diff --git a/addons/base/tests/utils.py b/addons/base/tests/utils.py
index 5ab0bbbf45c..90cd79220f6 100644
--- a/addons/base/tests/utils.py
+++ b/addons/base/tests/utils.py
@@ -1,31 +1,40 @@
import pytest
from addons.base.utils import get_mfr_url
-from addons.osfstorage.tests.utils import StorageTestCase
from tests.base import OsfTestCase
-from osf_tests.factories import ProjectFactory, UserFactory, RegionFactory, CommentFactory
+from osf_tests.factories import ProjectFactory, UserFactory, CommentFactory
from website.settings import MFR_SERVER_URL
class MockFolder(dict):
-
def __init__(self):
- self.name = 'Fake Folder'
- self.json = {'id': 'Fake Key', 'parent_id': 'cba321', 'name': 'Fake Folder'}
- self['data'] = {'name': 'Fake Folder', 'key': 'Fake Key', 'parentCollection': False}
- self['library'] = {'type': 'personal', 'id': '34241'}
- self['name'] = 'Fake Folder'
- self['id'] = 'Fake Key'
+ self.name = "Fake Folder"
+ self.json = {
+ "id": "Fake Key",
+ "parent_id": "cba321",
+ "name": "Fake Folder",
+ }
+ self["data"] = {
+ "name": "Fake Folder",
+ "key": "Fake Key",
+ "parentCollection": False,
+ }
+ self["library"] = {"type": "personal", "id": "34241"}
+ self["name"] = "Fake Folder"
+ self["id"] = "Fake Key"
class MockLibrary(dict):
-
def __init__(self):
- self.name = 'Fake Library'
- self.json = {'id': 'Fake Library Key', 'parent_id': 'cba321'}
- self['data'] = {'name': 'Fake Library', 'key': 'Fake Key', 'id': '12345' }
- self['name'] = 'Fake Library'
- self['id'] = 'Fake Library Key'
+ self.name = "Fake Library"
+ self.json = {"id": "Fake Library Key", "parent_id": "cba321"}
+ self["data"] = {
+ "name": "Fake Library",
+ "key": "Fake Key",
+ "id": "12345",
+ }
+ self["name"] = "Fake Library"
+ self["id"] = "Fake Library Key"
@pytest.mark.django_db
@@ -34,6 +43,9 @@ def test_mfr_url(self):
user = UserFactory()
project = ProjectFactory(creator=user)
comment = CommentFactory()
- assert get_mfr_url(project, 'github') == MFR_SERVER_URL
- assert get_mfr_url(project, 'osfstorage') == project.osfstorage_region.mfr_url
- assert get_mfr_url(comment, 'osfstorage') == MFR_SERVER_URL
+ assert get_mfr_url(project, "github") == MFR_SERVER_URL
+ assert (
+ get_mfr_url(project, "osfstorage")
+ == project.osfstorage_region.mfr_url
+ )
+ assert get_mfr_url(comment, "osfstorage") == MFR_SERVER_URL
diff --git a/addons/base/tests/views.py b/addons/base/tests/views.py
index 33675736754..02b97080069 100644
--- a/addons/base/tests/views.py
+++ b/addons/base/tests/views.py
@@ -21,7 +21,6 @@
class OAuthAddonAuthViewsTestCaseMixin(OAuthAddonTestCaseMixin):
-
@property
def ADDON_SHORT_NAME(self):
raise NotImplementedError()
@@ -35,10 +34,7 @@ def Provider(self):
raise NotImplementedError()
def test_oauth_start(self):
- url = api_url_for(
- 'oauth_connect',
- service_name=self.ADDON_SHORT_NAME
- )
+ url = api_url_for("oauth_connect", service_name=self.ADDON_SHORT_NAME)
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_302_FOUND
redirect_url = urlparse(res.location)
@@ -46,45 +42,42 @@ def test_oauth_start(self):
provider_url = urlparse(self.Provider().auth_url)
provider_params = parse_qs(provider_url.query)
for param, value in redirect_params.items():
- if param == 'state': # state may change between calls
+ if param == "state": # state may change between calls
continue
assert value == provider_params[param]
def test_oauth_finish(self):
- url = web_url_for(
- 'oauth_callback',
- service_name=self.ADDON_SHORT_NAME
- )
- with mock.patch.object(self.Provider, 'auth_callback') as mock_callback:
+ url = web_url_for("oauth_callback", service_name=self.ADDON_SHORT_NAME)
+ with mock.patch.object(
+ self.Provider, "auth_callback"
+ ) as mock_callback:
mock_callback.return_value = True
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
name, args, kwargs = mock_callback.mock_calls[0]
- assert kwargs['user']._id == self.user._id
+ assert kwargs["user"]._id == self.user._id
- @mock.patch('website.oauth.views.requests.get')
+ @mock.patch("website.oauth.views.requests.get")
def test_oauth_finish_enable_gv(self, mock_requests_get):
- url = web_url_for(
- 'oauth_callback',
- service_name=self.ADDON_SHORT_NAME
- )
+ url = web_url_for("oauth_callback", service_name=self.ADDON_SHORT_NAME)
query_params = {
- 'code': 'somecode',
- 'state': 'somestatetoken',
+ "code": "somecode",
+ "state": "somestatetoken",
}
with override_flag(ENABLE_GV, active=True):
- request_url = urlunparse(urlparse(url)._replace(query=urlencode(query_params)))
+ request_url = urlunparse(
+ urlparse(url)._replace(query=urlencode(query_params))
+ )
res = self.app.get(request_url, auth=self.user.auth)
gv_callback_url = mock_requests_get.call_args[0][0]
parsed_callback_url = urlparse(gv_callback_url)
assert parsed_callback_url.netloc == urlparse(GRAVYVALET_URL).netloc
- assert parsed_callback_url.path == '/v1/oauth/callback'
+ assert parsed_callback_url.path == "/v1/oauth/callback"
assert dict(parse_qsl(parsed_callback_url.query)) == query_params
def test_delete_external_account(self):
url = api_url_for(
- 'oauth_disconnect',
- external_account_id=self.external_account._id
+ "oauth_disconnect", external_account_id=self.external_account._id
)
res = self.app.delete(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
@@ -96,15 +89,13 @@ def test_delete_external_account(self):
def test_delete_external_account_not_owner(self):
other_user = AuthUserFactory()
url = api_url_for(
- 'oauth_disconnect',
- external_account_id=self.external_account._id
+ "oauth_disconnect", external_account_id=self.external_account._id
)
res = self.app.delete(url, auth=other_user.auth)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
class OAuthAddonConfigViewsTestCaseMixin(OAuthAddonTestCaseMixin):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.node_settings = None
@@ -119,7 +110,9 @@ def ExternalAccountFactory(self):
@property
def folder(self):
- raise NotImplementedError("This test suite must expose a 'folder' property.")
+ raise NotImplementedError(
+ "This test suite must expose a 'folder' property."
+ )
@property
def Serializer(self):
@@ -135,20 +128,22 @@ def test_import_auth(self):
self.user.save()
node = ProjectFactory(creator=self.user)
- node_settings = node.get_or_add_addon(self.ADDON_SHORT_NAME, auth=Auth(self.user))
+ node_settings = node.get_or_add_addon(
+ self.ADDON_SHORT_NAME, auth=Auth(self.user)
+ )
node.save()
- url = node.api_url_for(f'{self.ADDON_SHORT_NAME}_import_auth')
- res = self.app.put(url, json={
- 'external_account_id': ea._id
- }, auth=self.user.auth)
+ url = node.api_url_for(f"{self.ADDON_SHORT_NAME}_import_auth")
+ res = self.app.put(
+ url, json={"external_account_id": ea._id}, auth=self.user.auth
+ )
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
+ assert "result" in res.json
node_settings.reload()
assert node_settings.external_account._id == ea._id
node.reload()
last_log = node.logs.latest()
- assert last_log.action == f'{self.ADDON_SHORT_NAME}_node_authorized'
+ assert last_log.action == f"{self.ADDON_SHORT_NAME}_node_authorized"
def test_import_auth_invalid_account(self):
ea = self.ExternalAccountFactory()
@@ -156,10 +151,12 @@ def test_import_auth_invalid_account(self):
node = ProjectFactory(creator=self.user)
node.add_addon(self.ADDON_SHORT_NAME, auth=self.auth)
node.save()
- url = node.api_url_for(f'{self.ADDON_SHORT_NAME}_import_auth')
- res = self.app.put(url, json={
- 'external_account_id': ea._id
- }, auth=self.user.auth, )
+ url = node.api_url_for(f"{self.ADDON_SHORT_NAME}_import_auth")
+ res = self.app.put(
+ url,
+ json={"external_account_id": ea._id},
+ auth=self.user.auth,
+ )
assert res.status_code == http_status.HTTP_403_FORBIDDEN
def test_import_auth_cant_write_node(self):
@@ -170,71 +167,83 @@ def test_import_auth_cant_write_node(self):
user.save()
node = ProjectFactory(creator=self.user)
- node.add_contributor(user, permissions=permissions.READ, auth=self.auth, save=True)
+ node.add_contributor(
+ user, permissions=permissions.READ, auth=self.auth, save=True
+ )
node.add_addon(self.ADDON_SHORT_NAME, auth=self.auth)
node.save()
- url = node.api_url_for(f'{self.ADDON_SHORT_NAME}_import_auth')
- res = self.app.put(url, json={
- 'external_account_id': ea._id
- }, auth=user.auth, )
+ url = node.api_url_for(f"{self.ADDON_SHORT_NAME}_import_auth")
+ res = self.app.put(
+ url,
+ json={"external_account_id": ea._id},
+ auth=user.auth,
+ )
assert res.status_code == http_status.HTTP_403_FORBIDDEN
def test_set_config(self):
self.node_settings.set_auth(self.external_account, self.user)
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_set_config')
- res = self.app.put(url, json={
- 'selected': self.folder
- }, auth=self.user.auth)
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_set_config")
+ res = self.app.put(
+ url, json={"selected": self.folder}, auth=self.user.auth
+ )
assert res.status_code == http_status.HTTP_200_OK
self.project.reload()
- assert self.project.logs.latest().action == f'{self.ADDON_SHORT_NAME}_folder_selected'
- assert res.json['result']['folder']['path'] == self.folder['path']
+ assert (
+ self.project.logs.latest().action
+ == f"{self.ADDON_SHORT_NAME}_folder_selected"
+ )
+ assert res.json["result"]["folder"]["path"] == self.folder["path"]
def test_get_config(self):
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
- with mock.patch.object(type(self.Serializer()), 'credentials_are_valid', return_value=True):
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
+ with mock.patch.object(
+ type(self.Serializer()), "credentials_are_valid", return_value=True
+ ):
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
+ assert "result" in res.json
serialized = self.Serializer().serialize_settings(
- self.node_settings,
- self.user,
- self.client
+ self.node_settings, self.user, self.client
)
- assert serialized == res.json['result']
+ assert serialized == res.json["result"]
def test_get_config_unauthorized(self):
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
user = AuthUserFactory()
- self.project.add_contributor(user, permissions=permissions.READ, auth=self.auth, save=True)
- res = self.app.get(url, auth=user.auth, )
+ self.project.add_contributor(
+ user, permissions=permissions.READ, auth=self.auth, save=True
+ )
+ res = self.app.get(
+ url,
+ auth=user.auth,
+ )
assert res.status_code == http_status.HTTP_403_FORBIDDEN
def test_get_config_not_logged_in(self):
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=None)
assert res.status_code == http_status.HTTP_302_FOUND
def test_account_list_single(self):
- url = api_url_for(f'{self.ADDON_SHORT_NAME}_account_list')
+ url = api_url_for(f"{self.ADDON_SHORT_NAME}_account_list")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'accounts' in res.json
- assert len(res.json['accounts']) == 1
+ assert "accounts" in res.json
+ assert len(res.json["accounts"]) == 1
def test_account_list_multiple(self):
ea = self.ExternalAccountFactory()
self.user.external_accounts.add(ea)
self.user.save()
- url = api_url_for(f'{self.ADDON_SHORT_NAME}_account_list')
+ url = api_url_for(f"{self.ADDON_SHORT_NAME}_account_list")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'accounts' in res.json
- assert len(res.json['accounts']) == 2
+ assert "accounts" in res.json
+ assert len(res.json["accounts"]) == 2
def test_account_list_not_authorized(self):
- url = api_url_for(f'{self.ADDON_SHORT_NAME}_account_list')
+ url = api_url_for(f"{self.ADDON_SHORT_NAME}_account_list")
res = self.app.get(url, auth=None)
assert res.status_code == http_status.HTTP_302_FOUND
@@ -244,13 +253,15 @@ def test_folder_list(self):
# subclass, mock any API calls, and call super.
self.node_settings.set_auth(self.external_account, self.user)
self.node_settings.save()
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_folder_list')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_folder_list")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
# TODO test result serialization?
def test_deauthorize_node(self):
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_deauthorize_node')
+ url = self.project.api_url_for(
+ f"{self.ADDON_SHORT_NAME}_deauthorize_node"
+ )
res = self.app.delete(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
self.node_settings.reload()
@@ -260,13 +271,16 @@ def test_deauthorize_node(self):
# A log event was saved
self.project.reload()
last_log = self.project.logs.latest()
- assert last_log.action == f'{self.ADDON_SHORT_NAME}_node_deauthorized'
+ assert last_log.action == f"{self.ADDON_SHORT_NAME}_node_deauthorized"
-class OAuthCitationAddonConfigViewsTestCaseMixin(OAuthAddonConfigViewsTestCaseMixin):
-
+class OAuthCitationAddonConfigViewsTestCaseMixin(
+ OAuthAddonConfigViewsTestCaseMixin
+):
def __init__(self, *args, **kwargs):
- super(OAuthAddonConfigViewsTestCaseMixin,self).__init__(*args, **kwargs)
+ super(OAuthAddonConfigViewsTestCaseMixin, self).__init__(
+ *args, **kwargs
+ )
self.mock_verify = None
self.node_settings = None
self.provider = None
@@ -310,8 +324,7 @@ def mockResponses(self):
def setUp(self):
super().setUp()
self.mock_verify = mock.patch.object(
- self.client,
- '_verify_client_validity'
+ self.client, "_verify_client_validity"
)
self.mock_verify.start()
@@ -320,45 +333,66 @@ def tearDown(self):
super().tearDown()
def test_set_config(self):
- with mock.patch.object(self.client, '_folder_metadata') as mock_metadata:
+ with mock.patch.object(
+ self.client, "_folder_metadata"
+ ) as mock_metadata:
mock_metadata.return_value = self.folder
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_set_config')
- res = self.app.put(url, json={
- 'external_list_id': self.folder.json['id'],
- 'external_list_name': self.folder.name,
- }, auth=self.user.auth)
+ url = self.project.api_url_for(
+ f"{self.ADDON_SHORT_NAME}_set_config"
+ )
+ res = self.app.put(
+ url,
+ json={
+ "external_list_id": self.folder.json["id"],
+ "external_list_name": self.folder.name,
+ },
+ auth=self.user.auth,
+ )
assert res.status_code == http_status.HTTP_200_OK
self.project.reload()
- assert self.project.logs.latest().action == f'{self.ADDON_SHORT_NAME}_folder_selected'
- assert res.json['result']['folder']['name'] == self.folder.name
+ assert (
+ self.project.logs.latest().action
+ == f"{self.ADDON_SHORT_NAME}_folder_selected"
+ )
+ assert res.json["result"]["folder"]["name"] == self.folder.name
def test_get_config(self):
- with mock.patch.object(self.client, '_folder_metadata') as mock_metadata:
+ with mock.patch.object(
+ self.client, "_folder_metadata"
+ ) as mock_metadata:
mock_metadata.return_value = self.folder
- self.node_settings.api._client = 'client'
+ self.node_settings.api._client = "client"
self.node_settings.save()
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(
+ f"{self.ADDON_SHORT_NAME}_get_config"
+ )
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- result = res.json['result']
+ assert "result" in res.json
+ result = res.json["result"]
serialized = self.Serializer(
node_settings=self.node_settings,
- user_settings=self.node_settings.user_settings
+ user_settings=self.node_settings.user_settings,
).serialized_node_settings
- serialized['validCredentials'] = self.citationsProvider().check_credentials(self.node_settings)
+ serialized["validCredentials"] = (
+ self.citationsProvider().check_credentials(self.node_settings)
+ )
assert serialized == result
def test_folder_list(self):
- with mock.patch.object(self.client, '_get_folders'):
+ with mock.patch.object(self.client, "_get_folders"):
self.node_settings.set_auth(self.external_account, self.user)
self.node_settings.save()
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_citation_list')
+ url = self.project.api_url_for(
+ f"{self.ADDON_SHORT_NAME}_citation_list"
+ )
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
def test_check_credentials(self):
- with mock.patch.object(self.client, 'client', new_callable=mock.PropertyMock) as mock_client:
+ with mock.patch.object(
+ self.client, "client", new_callable=mock.PropertyMock
+ ) as mock_client:
self.provider = self.citationsProvider()
mock_client.side_effect = HTTPError(403)
assert not self.provider.check_credentials(self.node_settings)
@@ -372,17 +406,19 @@ def test_widget_view_complete(self):
self.citationsProvider().set_config(
self.node_settings,
self.user,
- self.folder.json['id'],
+ self.folder.json["id"],
self.folder.name,
- Auth(self.user)
+ Auth(self.user),
)
assert self.node_settings.complete
- assert self.node_settings.list_id == 'Fake Key'
+ assert self.node_settings.list_id == "Fake Key"
- res = self.citationsProvider().widget(self.project.get_addon(self.ADDON_SHORT_NAME))
+ res = self.citationsProvider().widget(
+ self.project.get_addon(self.ADDON_SHORT_NAME)
+ )
- assert res['complete']
- assert res['list_id'] == 'Fake Key'
+ assert res["complete"]
+ assert res["list_id"] == "Fake Key"
def test_widget_view_incomplete(self):
# JSON: tell the widget when it hasn't been configured
@@ -391,41 +427,41 @@ def test_widget_view_incomplete(self):
assert not self.node_settings.complete
assert self.node_settings.list_id is None
- res = self.citationsProvider().widget(self.project.get_addon(self.ADDON_SHORT_NAME))
+ res = self.citationsProvider().widget(
+ self.project.get_addon(self.ADDON_SHORT_NAME)
+ )
- assert not res['complete']
- assert res['list_id'] is None
+ assert not res["complete"]
+ assert res["list_id"] is None
@responses.activate
def test_citation_list_root(self):
-
responses.add(
responses.Response(
responses.GET,
self.foldersApiUrl,
- body=self.mockResponses['folders'],
- content_type='application/json'
+ body=self.mockResponses["folders"],
+ content_type="application/json",
)
)
res = self.app.get(
- self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_citation_list'),
- auth=self.user.auth
+ self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_citation_list"),
+ auth=self.user.auth,
)
- root = res.json['contents'][0]
- assert root['kind'] == 'folder'
- assert root['id'] == 'ROOT'
- assert root['parent_list_id'] == '__'
+ root = res.json["contents"][0]
+ assert root["kind"] == "folder"
+ assert root["id"] == "ROOT"
+ assert root["parent_list_id"] == "__"
@responses.activate
def test_citation_list_non_root(self):
-
responses.add(
responses.Response(
responses.GET,
self.foldersApiUrl,
- body=self.mockResponses['folders'],
- content_type='application/json'
+ body=self.mockResponses["folders"],
+ content_type="application/json",
)
)
@@ -433,36 +469,38 @@ def test_citation_list_non_root(self):
responses.Response(
responses.GET,
self.documentsApiUrl,
- body=self.mockResponses['documents'],
- content_type='application/json'
+ body=self.mockResponses["documents"],
+ content_type="application/json",
)
)
res = self.app.get(
- self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_citation_list', list_id='ROOT'),
- auth=self.user.auth
+ self.project.api_url_for(
+ f"{self.ADDON_SHORT_NAME}_citation_list", list_id="ROOT"
+ ),
+ auth=self.user.auth,
)
- children = res.json['contents']
+ children = res.json["contents"]
assert len(children) == 7
- assert children[0]['kind'] == 'folder'
- assert children[1]['kind'] == 'file'
- assert children[1].get('csl') is not None
+ assert children[0]["kind"] == "folder"
+ assert children[1]["kind"] == "file"
+ assert children[1].get("csl") is not None
@responses.activate
def test_citation_list_non_linked_or_child_non_authorizer(self):
non_authorizing_user = AuthUserFactory()
self.project.add_contributor(non_authorizing_user, save=True)
- self.node_settings.list_id = 'e843da05-8818-47c2-8c37-41eebfc4fe3f'
+ self.node_settings.list_id = "e843da05-8818-47c2-8c37-41eebfc4fe3f"
self.node_settings.save()
responses.add(
responses.Response(
responses.GET,
self.foldersApiUrl,
- body=self.mockResponses['folders'],
- content_type='application/json'
+ body=self.mockResponses["folders"],
+ content_type="application/json",
)
)
@@ -470,13 +508,15 @@ def test_citation_list_non_linked_or_child_non_authorizer(self):
responses.Response(
responses.GET,
self.documentsApiUrl,
- body=self.mockResponses['documents'],
- content_type='application/json'
+ body=self.mockResponses["documents"],
+ content_type="application/json",
)
)
res = self.app.get(
- self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_citation_list', list_id='ROOT'),
+ self.project.api_url_for(
+ f"{self.ADDON_SHORT_NAME}_citation_list", list_id="ROOT"
+ ),
auth=non_authorizing_user.auth,
)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
diff --git a/addons/base/utils.py b/addons/base/utils.py
index c86f790fecd..9a64fc38625 100644
--- a/addons/base/utils.py
+++ b/addons/base/utils.py
@@ -6,7 +6,7 @@
def get_mfr_url(target, provider_name):
- if hasattr(target, 'osfstorage_region') and provider_name == 'osfstorage':
+ if hasattr(target, "osfstorage_region") and provider_name == "osfstorage":
return target.osfstorage_region.mfr_url
return MFR_SERVER_URL
@@ -16,42 +16,76 @@ def serialize_addon_config(config, user):
user_addon = user.get_addon(config.short_name)
ret = {
- 'addon_short_name': config.short_name,
- 'addon_full_name': config.full_name,
- 'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
- 'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
- 'is_enabled': user_addon is not None,
- 'addon_icon_url': config.icon_url,
+ "addon_short_name": config.short_name,
+ "addon_full_name": config.full_name,
+ "node_settings_template": lookup.get_template(
+ basename(config.node_settings_template)
+ ),
+ "user_settings_template": lookup.get_template(
+ basename(config.user_settings_template)
+ ),
+ "is_enabled": user_addon is not None,
+ "addon_icon_url": config.icon_url,
}
ret.update(user_addon.to_json(user) if user_addon else {})
return ret
+
def get_addons_by_config_type(config_type, user):
- addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
- return [serialize_addon_config(addon_config, user) for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower())]
+ addons = [
+ addon
+ for addon in settings.ADDONS_AVAILABLE
+ if config_type in addon.configs
+ ]
+ return [
+ serialize_addon_config(addon_config, user)
+ for addon_config in sorted(
+ addons, key=lambda cfg: cfg.full_name.lower()
+ )
+ ]
+
def format_last_known_metadata(auth, node, file, error_type):
msg = """
""" # None is default
- if error_type != 'FILE_SUSPENDED' and ((auth.user and node.is_contributor_or_group_member(auth.user)) or
- (auth.private_key and auth.private_key in node.private_link_keys_active)):
+ if error_type != "FILE_SUSPENDED" and (
+ (auth.user and node.is_contributor_or_group_member(auth.user))
+ or (
+ auth.private_key
+ and auth.private_key in node.private_link_keys_active
+ )
+ ):
last_meta = file.last_known_metadata
- last_seen = last_meta.get('last_seen', None)
- hashes = last_meta.get('hashes', None)
- path = last_meta.get('path', None)
- size = last_meta.get('size', None)
+ last_seen = last_meta.get("last_seen", None)
+ hashes = last_meta.get("hashes", None)
+ path = last_meta.get("path", None)
+ size = last_meta.get("size", None)
parts = [
- """This file was """ if last_seen or hashes or path or size else '',
- """last seen on {} UTC """.format(last_seen.strftime('%c')) if last_seen else '',
- f"""and found at path {markupsafe.escape(path)} """ if last_seen and path else '',
- f"""last found at path {markupsafe.escape(path)} """ if not last_seen and path else '',
- f"""with a file size of {size} bytes""" if size and (last_seen or path) else '',
- f"""last seen with a file size of {size} bytes""" if size and not (last_seen or path) else '',
- """.""" if last_seen or hashes or path or size else '',
+ """This file was """
+ if last_seen or hashes or path or size
+ else "",
+ """last seen on {} UTC """.format(last_seen.strftime("%c"))
+ if last_seen
+ else "",
+ f"""and found at path {markupsafe.escape(path)} """
+ if last_seen and path
+ else "",
+ f"""last found at path {markupsafe.escape(path)} """
+ if not last_seen and path
+ else "",
+ f"""with a file size of {size} bytes"""
+ if size and (last_seen or path)
+ else "",
+ f"""last seen with a file size of {size} bytes"""
+ if size and not (last_seen or path)
+ else "",
+ """.""" if last_seen or hashes or path or size else "",
"""Hashes of last seen version:
{}
""".format(
- ''.join([f'{k}: {v}' for k, v in hashes.items()])
- ) if hashes else '', # TODO: Format better for UI
- msg
+ "".join([f"{k}: {v}" for k, v in hashes.items()])
+ )
+ if hashes
+ else "", # TODO: Format better for UI
+ msg,
]
- return ''.join(parts)
+ return "".join(parts)
return msg
diff --git a/addons/base/views.py b/addons/base/views.py
index 6fea2444421..a63ea2d6960 100644
--- a/addons/base/views.py
+++ b/addons/base/views.py
@@ -28,7 +28,11 @@
from framework.auth import Auth
from framework.auth import cas
from framework.auth import oauth_scopes
-from framework.auth.decorators import collect_auth, must_be_logged_in, must_be_signed
+from framework.auth.decorators import (
+ collect_auth,
+ must_be_logged_in,
+ must_be_signed,
+)
from framework.exceptions import HTTPError
from framework.flask import redirect
from framework.sentry import log_exception
@@ -53,21 +57,26 @@
DraftRegistration,
Guid,
FileVersionUserMetadata,
- FileVersion
+ FileVersion,
)
from osf.metrics import PreprintView, PreprintDownload
from osf.utils import permissions
from osf.utils.requests import requests_retry_session
from website.profile.utils import get_profile_image_url
from website.project import decorators
-from website.project.decorators import must_be_contributor_or_public, must_be_valid_project, check_contributor_auth
+from website.project.decorators import (
+ must_be_contributor_or_public,
+ must_be_valid_project,
+ check_contributor_auth,
+)
from website.project.utils import serialize_node
from website.util import rubeus
# import so that associated listener is instantiated and gets emails
from website.notifications.events.files import FileEvent # noqa
-ERROR_MESSAGES = {'FILE_GONE': """
+ERROR_MESSAGES = {
+ "FILE_GONE": """
@@ -78,7 +87,7 @@
It was deleted by {deleted_by} on {deleted_on}.
""",
- 'FILE_GONE_ACTOR_UNKNOWN': """
+ "FILE_GONE_ACTOR_UNKNOWN": """
@@ -89,7 +98,7 @@
It was deleted on {deleted_on}.
""",
- 'DONT_KNOW': """
+ "DONT_KNOW": """
@@ -97,7 +106,7 @@
File not found at {provider}.
""",
- 'BLAME_PROVIDER': """
+ "BLAME_PROVIDER": """
@@ -109,36 +118,39 @@
You may wish to verify this through {provider}'s website.
""",
- 'FILE_SUSPENDED': """
+ "FILE_SUSPENDED": """
-This content has been removed."""}
+This content has been removed.""",
+}
-WATERBUTLER_JWE_KEY = jwe.kdf(settings.WATERBUTLER_JWE_SECRET.encode('utf-8'), settings.WATERBUTLER_JWE_SALT.encode('utf-8'))
+WATERBUTLER_JWE_KEY = jwe.kdf(
+ settings.WATERBUTLER_JWE_SECRET.encode("utf-8"),
+ settings.WATERBUTLER_JWE_SALT.encode("utf-8"),
+)
@decorators.must_have_permission(permissions.WRITE)
@decorators.must_not_be_registration
def disable_addon(auth, **kwargs):
- node = kwargs['node'] or kwargs['project']
+ node = kwargs["node"] or kwargs["project"]
- addon_name = kwargs.get('addon')
+ addon_name = kwargs.get("addon")
if addon_name is None:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
deleted = node.delete_addon(addon_name, auth)
- return {'deleted': deleted}
+ return {"deleted": deleted}
@must_be_logged_in
def get_addon_user_config(**kwargs):
+ user = kwargs["auth"].user
- user = kwargs['auth'].user
-
- addon_name = kwargs.get('addon')
+ addon_name = kwargs.get("addon")
if addon_name is None:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
@@ -150,20 +162,20 @@ def get_addon_user_config(**kwargs):
permission_map = {
- 'create_folder': permissions.WRITE,
- 'revisions': permissions.READ,
- 'metadata': permissions.READ,
- 'download': permissions.READ,
- 'render': permissions.READ,
- 'export': permissions.READ,
- 'upload': permissions.WRITE,
- 'delete': permissions.WRITE,
- 'copy': permissions.WRITE,
- 'move': permissions.WRITE,
- 'copyto': permissions.WRITE,
- 'moveto': permissions.WRITE,
- 'copyfrom': permissions.READ,
- 'movefrom': permissions.WRITE,
+ "create_folder": permissions.WRITE,
+ "revisions": permissions.READ,
+ "metadata": permissions.READ,
+ "download": permissions.READ,
+ "render": permissions.READ,
+ "export": permissions.READ,
+ "upload": permissions.WRITE,
+ "delete": permissions.WRITE,
+ "copy": permissions.WRITE,
+ "move": permissions.WRITE,
+ "copyto": permissions.WRITE,
+ "moveto": permissions.WRITE,
+ "copyfrom": permissions.READ,
+ "movefrom": permissions.WRITE,
}
@@ -171,7 +183,10 @@ def get_permission_for_action(action):
"""Retrieve the permission level required for a given action."""
permission = permission_map.get(action)
if not permission:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, message='Invalid action specified.')
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ message="Invalid action specified.",
+ )
return permission
@@ -192,23 +207,33 @@ def check_resource_permissions(resource, auth, action):
required_permission = get_permission_for_action(action)
if isinstance(resource, Registration):
- return _check_registration_permissions(resource, auth, required_permission, action)
+ return _check_registration_permissions(
+ resource, auth, required_permission, action
+ )
elif isinstance(resource, Node):
- return _check_node_permissions(resource, auth, required_permission, action)
+ return _check_node_permissions(
+ resource, auth, required_permission, action
+ )
elif isinstance(resource, Preprint):
return _check_preprint_permissions(resource, auth, required_permission)
elif isinstance(resource, DraftNode):
draft_registration = resource.registered_draft.first()
- return _check_draft_registration_permissions(draft_registration, auth, required_permission)
+ return _check_draft_registration_permissions(
+ draft_registration, auth, required_permission
+ )
else:
raise NotImplementedError()
def _check_registration_permissions(registration, auth, permission, action):
if permission == permissions.READ:
- return registration.can_view(auth) or registration.registered_from.can_view(auth)
- if action in ('copyfrom', 'upload'):
- return _check_hierarchical_write_permissions(resource=registration, auth=auth)
+ return registration.can_view(
+ auth
+ ) or registration.registered_from.can_view(auth)
+ if action in ("copyfrom", "upload"):
+ return _check_hierarchical_write_permissions(
+ resource=registration, auth=auth
+ )
return registration.can_edit(auth)
@@ -217,7 +242,7 @@ def _check_node_permissions(node, auth, permission, action):
return node.can_view(auth)
if node.can_edit(auth):
return True
- if action == 'copyfrom':
+ if action == "copyfrom":
return _check_hierarchical_write_permissions(resource=node, auth=auth)
@@ -227,7 +252,9 @@ def _check_preprint_permissions(preprint, auth, permission):
return preprint.can_edit(auth)
-def _check_draft_registration_permissions(draft_registration, auth, permission):
+def _check_draft_registration_permissions(
+ draft_registration, auth, permission
+):
if permission == permissions.READ:
return draft_registration.can_view(auth)
return draft_registration.can_edit(auth)
@@ -241,31 +268,35 @@ def _check_hierarchical_write_permissions(resource, auth):
permissions_resource = permissions_resource.parent_node
return False
+
def _download_is_from_mfr(waterbutler_data):
- metrics_data = waterbutler_data['metrics']
- uri = metrics_data['uri']
- is_render_uri = furl(uri or '').query.params.get('mode') == 'render'
+ metrics_data = waterbutler_data["metrics"]
+ uri = metrics_data["uri"]
+ is_render_uri = furl(uri or "").query.params.get("mode") == "render"
return (
# This header is sent for download requests that
# originate from MFR, e.g. for the code pygments renderer
- request.headers.get('X-Cos-Mfr-Render-Request', None) or
+ request.headers.get("X-Cos-Mfr-Render-Request", None)
+ or
# Need to check the URI in order to account
# for renderers that send XHRs from the
# rendered content, e.g. PDFs
is_render_uri
)
+
def make_auth(user):
if user is not None:
return {
- 'id': user._id,
- 'email': f'{user._id}@osf.io',
- 'name': user.fullname,
+ "id": user._id,
+ "email": f"{user._id}@osf.io",
+ "name": user.fullname,
}
return {}
+
def authenticate_via_oauth_bearer_token(resource, action):
- authorization = request.headers.get('Authorization')
+ authorization = request.headers.get("Authorization")
client = cas.get_client()
try:
@@ -273,7 +304,9 @@ def authenticate_via_oauth_bearer_token(resource, action):
cas_resp = client.profile(access_token)
except cas.CasError as err:
sentry.log_exception(err)
- return json_renderer(err) # Assuming json_renderer wraps the error in a Response
+ return json_renderer(
+ err
+ ) # Assuming json_renderer wraps the error in a Response
permission = get_permission_for_action(action)
if permission == permissions.READ:
@@ -283,11 +316,13 @@ def authenticate_via_oauth_bearer_token(resource, action):
else:
required_scope = resource.file_write_scope
- token = cas_resp.attributes.get('accessTokenScope')
+ token = cas_resp.attributes.get("accessTokenScope")
if not token or not cas_resp.authenticated:
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
- normalize_scopes = oauth_scopes.normalize_scopes(cas_resp.attributes['accessTokenScope'])
+ normalize_scopes = oauth_scopes.normalize_scopes(
+ cas_resp.attributes["accessTokenScope"]
+ )
if required_scope not in normalize_scopes:
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
@@ -296,14 +331,14 @@ def authenticate_via_oauth_bearer_token(resource, action):
def decrypt_and_decode_jwt_payload():
try:
- payload_encrypted = request.args.get('payload', '').encode('utf-8')
+ payload_encrypted = request.args.get("payload", "").encode("utf-8")
payload_decrypted = jwe.decrypt(payload_encrypted, WATERBUTLER_JWE_KEY)
return jwt.decode(
payload_decrypted,
settings.WATERBUTLER_JWT_SECRET,
- options={'require': ['exp']},
+ options={"require": ["exp"]},
algorithms=[settings.WATERBUTLER_JWT_ALGORITHM],
- )['data']
+ )["data"]
except (jwt.InvalidTokenError, KeyError) as err:
sentry.log_message(str(err))
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
@@ -312,32 +347,41 @@ def decrypt_and_decode_jwt_payload():
def get_authenticated_resource(resource_id):
resource = AbstractNode.load(resource_id) or Preprint.load(resource_id)
if not resource:
- raise HTTPError(http_status.HTTP_404_NOT_FOUND, message='Resource not found.')
+ raise HTTPError(
+ http_status.HTTP_404_NOT_FOUND, message="Resource not found."
+ )
if resource.deleted:
- raise HTTPError(http_status.HTTP_410_GONE, message='Resource has been deleted.')
+ raise HTTPError(
+ http_status.HTTP_410_GONE, message="Resource has been deleted."
+ )
return resource
-def _get_osfstorage_file_version(file_node: OsfStorageFileNode, version_string: str = None) -> FileVersion:
+def _get_osfstorage_file_version(
+ file_node: OsfStorageFileNode, version_string: str = None
+) -> FileVersion:
if not (file_node and file_node.is_file):
return None
try:
- return FileVersion.objects.select_related('region').get(
+ return FileVersion.objects.select_related("region").get(
basefilenode=file_node,
- identifier=version_string or str(file_node.versions.count())
+ identifier=version_string or str(file_node.versions.count()),
)
except FileVersion.DoesNotExist:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, 'Requested File Version unavailable')
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ "Requested File Version unavailable",
+ )
def _get_osfstorage_file_node(file_path: str) -> OsfStorageFileNode:
if not file_path:
return None
- file_id = file_path.strip('/')
+ file_id = file_path.strip("/")
return OsfStorageFileNode.load(file_id)
@@ -345,14 +389,18 @@ def authenticate_user_if_needed(auth, waterbutler_data, resource):
if auth.user:
return # User is already authenticated
- if 'cookie' in waterbutler_data:
- auth.user = OSFUser.from_cookie(waterbutler_data.get('cookie'))
+ if "cookie" in waterbutler_data:
+ auth.user = OSFUser.from_cookie(waterbutler_data.get("cookie"))
if not auth.user:
- raise HTTPError(http_status.HTTP_401_UNAUTHORIZED, 'Invalid user cookie.')
+ raise HTTPError(
+ http_status.HTTP_401_UNAUTHORIZED, "Invalid user cookie."
+ )
- authorization = request.headers.get('Authorization')
- if authorization and authorization.startswith('Bearer '):
- auth.user = authenticate_via_oauth_bearer_token(resource, waterbutler_data['action'])
+ authorization = request.headers.get("Authorization")
+ if authorization and authorization.startswith("Bearer "):
+ auth.user = authenticate_via_oauth_bearer_token(
+ resource, waterbutler_data["action"]
+ )
@collect_auth
@@ -387,19 +435,19 @@ def get_auth(auth, **kwargs):
waterbutler_data = decrypt_and_decode_jwt_payload()
# Authenticate the resource based on the node_id and handle potential draft nodes
- resource = get_authenticated_resource(waterbutler_data['nid'])
+ resource = get_authenticated_resource(waterbutler_data["nid"])
# Authenticate the user using cookie or Oauth if possible
authenticate_user_if_needed(auth, waterbutler_data, resource)
# Verify the user has permission to perform the requested action on the node
- action = waterbutler_data['action']
+ action = waterbutler_data["action"]
if not check_resource_permissions(resource, auth, action):
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
# Validate provider, exclude Preprints that don't have `get_addon`.
if not isinstance(resource, Preprint):
- provider = resource.get_addon(waterbutler_data['provider'])
+ provider = resource.get_addon(waterbutler_data["provider"])
if not provider:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
else:
@@ -409,36 +457,36 @@ def get_auth(auth, **kwargs):
file_node = None
fileversion = None
- if waterbutler_data['provider'] == 'osfstorage':
- file_node = _get_osfstorage_file_node(waterbutler_data.get('path'))
- fileversion = _get_osfstorage_file_version(file_node, waterbutler_data.get('version'))
+ if waterbutler_data["provider"] == "osfstorage":
+ file_node = _get_osfstorage_file_node(waterbutler_data.get("path"))
+ fileversion = _get_osfstorage_file_version(
+ file_node, waterbutler_data.get("version")
+ )
# Fetch Waterbutler credentials and settings for the resource
credentials, waterbutler_settings = get_waterbutler_data(
- resource,
- waterbutler_data,
- fileversion,
- provider
+ resource, waterbutler_data, fileversion, provider
)
if fileversion:
# Trigger any file-specific signals based on the action taken (e.g., file viewed, downloaded)
- if action == 'render':
- file_signals.file_viewed.send(auth=auth, fileversion=fileversion, file_node=file_node)
- elif action == 'download' and not _download_is_from_mfr(waterbutler_data):
- file_signals.file_downloaded.send(auth=auth, fileversion=fileversion, file_node=file_node)
+ if action == "render":
+ file_signals.file_viewed.send(
+ auth=auth, fileversion=fileversion, file_node=file_node
+ )
+ elif action == "download" and not _download_is_from_mfr(
+ waterbutler_data
+ ):
+ file_signals.file_downloaded.send(
+ auth=auth, fileversion=fileversion, file_node=file_node
+ )
# Construct the response payload including the JWT
- return construct_payload(
- auth,
- resource,
- credentials,
- waterbutler_settings
- )
+ return construct_payload(auth, resource, credentials, waterbutler_settings)
def get_waterbutler_data(resource, waterbutler_data, fileversion, provider):
- provider_name = waterbutler_data.get('provider')
+ provider_name = waterbutler_data.get("provider")
if isinstance(resource, Preprint):
credentials = resource.serialize_waterbutler_credentials()
waterbutler_settings = resource.serialize_waterbutler_settings()
@@ -450,234 +498,266 @@ def get_waterbutler_data(resource, waterbutler_data, fileversion, provider):
)
elif waffle.flag_is_active(request, features.ENABLE_GV):
data = requests_retry_session(
- f'{settings.DOMAIN}/v1/configured_storage_addon/{provider_name}/waterbutler-config'
+ f"{settings.DOMAIN}/v1/configured_storage_addon/{provider_name}/waterbutler-config"
)
- credentials, waterbutler_settings = data['data']
+ credentials, waterbutler_settings = data["data"]
else:
- credentials = resource.serialize_waterbutler_credentials(provider.short_name)
- waterbutler_settings = resource.serialize_waterbutler_settings(provider.short_name)
+ credentials = resource.serialize_waterbutler_credentials(
+ provider.short_name
+ )
+ waterbutler_settings = resource.serialize_waterbutler_settings(
+ provider.short_name
+ )
return credentials, waterbutler_settings
def construct_payload(auth, resource, credentials, waterbutler_settings):
-
if isinstance(resource, Registration):
callback_url = resource.api_url_for(
- 'registration_callbacks',
- _absolute=True,
- _internal=True
+ "registration_callbacks", _absolute=True, _internal=True
)
else:
callback_url = resource.api_url_for(
- 'create_waterbutler_log',
- _absolute=True,
- _internal=True
+ "create_waterbutler_log", _absolute=True, _internal=True
)
# Construct the data dictionary for JWT encoding
jwt_data = {
- 'exp': timezone.now() + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
- 'data': {
- 'auth': make_auth(auth.user),
- 'credentials': credentials,
- 'settings': waterbutler_settings,
- 'callback_url': callback_url
- }
+ "exp": timezone.now()
+ + datetime.timedelta(seconds=settings.WATERBUTLER_JWT_EXPIRATION),
+ "data": {
+ "auth": make_auth(auth.user),
+ "credentials": credentials,
+ "settings": waterbutler_settings,
+ "callback_url": callback_url,
+ },
}
# JWT encode the data
encoded_jwt = jwt.encode(
jwt_data,
settings.WATERBUTLER_JWT_SECRET,
- algorithm=settings.WATERBUTLER_JWT_ALGORITHM
+ algorithm=settings.WATERBUTLER_JWT_ALGORITHM,
)
# Encrypt the encoded JWT with JWE
decoded_encrypted_jwt = jwe.encrypt(
- encoded_jwt.encode(),
- WATERBUTLER_JWE_KEY
+ encoded_jwt.encode(), WATERBUTLER_JWE_KEY
).decode()
- return {'payload': decoded_encrypted_jwt}
+ return {"payload": decoded_encrypted_jwt}
LOG_ACTION_MAP = {
- 'move': NodeLog.FILE_MOVED,
- 'copy': NodeLog.FILE_COPIED,
- 'rename': NodeLog.FILE_RENAMED,
- 'create': NodeLog.FILE_ADDED,
- 'update': NodeLog.FILE_UPDATED,
- 'delete': NodeLog.FILE_REMOVED,
- 'create_folder': NodeLog.FOLDER_CREATED,
+ "move": NodeLog.FILE_MOVED,
+ "copy": NodeLog.FILE_COPIED,
+ "rename": NodeLog.FILE_RENAMED,
+ "create": NodeLog.FILE_ADDED,
+ "update": NodeLog.FILE_UPDATED,
+ "delete": NodeLog.FILE_REMOVED,
+ "create_folder": NodeLog.FOLDER_CREATED,
}
DOWNLOAD_ACTIONS = {
- 'download_file',
- 'download_zip',
+ "download_file",
+ "download_zip",
}
+
@must_be_signed
@no_auto_transaction
@must_be_valid_project(quickfiles_valid=True, preprints_valid=True)
def create_waterbutler_log(payload, **kwargs):
with transaction.atomic():
try:
- auth = payload['auth']
+ auth = payload["auth"]
# Don't log download actions
- if payload['action'] in DOWNLOAD_ACTIONS:
- guid = Guid.load(payload['metadata'].get('nid'))
+ if payload["action"] in DOWNLOAD_ACTIONS:
+ guid = Guid.load(payload["metadata"].get("nid"))
if guid:
node = guid.referent
- return {'status': 'success'}
+ return {"status": "success"}
- user = OSFUser.load(auth['id'])
+ user = OSFUser.load(auth["id"])
if user is None:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
- action = LOG_ACTION_MAP[payload['action']]
+ action = LOG_ACTION_MAP[payload["action"]]
except KeyError:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
auth = Auth(user=user)
- node = kwargs.get('node') or kwargs.get('project') or Preprint.load(kwargs.get('nid')) or Preprint.load(kwargs.get('pid'))
+ node = (
+ kwargs.get("node")
+ or kwargs.get("project")
+ or Preprint.load(kwargs.get("nid"))
+ or Preprint.load(kwargs.get("pid"))
+ )
if action in (NodeLog.FILE_MOVED, NodeLog.FILE_COPIED):
-
- for bundle in ('source', 'destination'):
- for key in ('provider', 'materialized', 'name', 'nid'):
+ for bundle in ("source", "destination"):
+ for key in ("provider", "materialized", "name", "nid"):
if key not in payload[bundle]:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
- dest = payload['destination']
- src = payload['source']
+ dest = payload["destination"]
+ src = payload["source"]
if src is not None and dest is not None:
- dest_path = dest['materialized']
- src_path = src['materialized']
- if dest_path.endswith('/') and src_path.endswith('/'):
+ dest_path = dest["materialized"]
+ src_path = src["materialized"]
+ if dest_path.endswith("/") and src_path.endswith("/"):
dest_path = os.path.dirname(dest_path)
src_path = os.path.dirname(src_path)
if (
- os.path.split(dest_path)[0] == os.path.split(src_path)[0] and
- dest['provider'] == src['provider'] and
- dest['nid'] == src['nid'] and
- dest['name'] != src['name']
+ os.path.split(dest_path)[0] == os.path.split(src_path)[0]
+ and dest["provider"] == src["provider"]
+ and dest["nid"] == src["nid"]
+ and dest["name"] != src["name"]
):
- action = LOG_ACTION_MAP['rename']
+ action = LOG_ACTION_MAP["rename"]
destination_node = node # For clarity
- source_node = AbstractNode.load(src['nid']) or Preprint.load(src['nid'])
+ source_node = AbstractNode.load(src["nid"]) or Preprint.load(
+ src["nid"]
+ )
# We return provider fullname so we need to load node settings, if applicable
source = None
- if hasattr(source_node, 'get_addon'):
- source = source_node.get_addon(payload['source']['provider'])
+ if hasattr(source_node, "get_addon"):
+ source = source_node.get_addon(payload["source"]["provider"])
destination = None
- if hasattr(node, 'get_addon'):
- destination = node.get_addon(payload['destination']['provider'])
-
- payload['source'].update({
- 'materialized': payload['source']['materialized'].lstrip('/'),
- 'addon': source.config.full_name if source else 'osfstorage',
- 'url': source_node.web_url_for(
- 'addon_view_or_download_file',
- path=payload['source']['path'].lstrip('/'),
- provider=payload['source']['provider']
- ),
- 'node': {
- '_id': source_node._id,
- 'url': source_node.url,
- 'title': source_node.title,
+ if hasattr(node, "get_addon"):
+ destination = node.get_addon(
+ payload["destination"]["provider"]
+ )
+
+ payload["source"].update(
+ {
+ "materialized": payload["source"]["materialized"].lstrip(
+ "/"
+ ),
+ "addon": source.config.full_name
+ if source
+ else "osfstorage",
+ "url": source_node.web_url_for(
+ "addon_view_or_download_file",
+ path=payload["source"]["path"].lstrip("/"),
+ provider=payload["source"]["provider"],
+ ),
+ "node": {
+ "_id": source_node._id,
+ "url": source_node.url,
+ "title": source_node.title,
+ },
}
- })
-
- payload['destination'].update({
- 'materialized': payload['destination']['materialized'].lstrip('/'),
- 'addon': destination.config.full_name if destination else 'osfstorage',
- 'url': destination_node.web_url_for(
- 'addon_view_or_download_file',
- path=payload['destination']['path'].lstrip('/'),
- provider=payload['destination']['provider']
- ),
- 'node': {
- '_id': destination_node._id,
- 'url': destination_node.url,
- 'title': destination_node.title,
+ )
+
+ payload["destination"].update(
+ {
+ "materialized": payload["destination"][
+ "materialized"
+ ].lstrip("/"),
+ "addon": destination.config.full_name
+ if destination
+ else "osfstorage",
+ "url": destination_node.web_url_for(
+ "addon_view_or_download_file",
+ path=payload["destination"]["path"].lstrip("/"),
+ provider=payload["destination"]["provider"],
+ ),
+ "node": {
+ "_id": destination_node._id,
+ "url": destination_node.url,
+ "title": destination_node.title,
+ },
}
- })
+ )
- if not payload.get('errors'):
+ if not payload.get("errors"):
destination_node.add_log(
- action=action,
- auth=auth,
- params=payload
+ action=action, auth=auth, params=payload
)
- if payload.get('email') is True or payload.get('errors'):
+ if payload.get("email") is True or payload.get("errors"):
mails.send_mail(
user.username,
- mails.FILE_OPERATION_FAILED if payload.get('errors')
+ mails.FILE_OPERATION_FAILED
+ if payload.get("errors")
else mails.FILE_OPERATION_SUCCESS,
- action=payload['action'],
+ action=payload["action"],
source_node=source_node,
destination_node=destination_node,
- source_path=payload['source']['materialized'],
- source_addon=payload['source']['addon'],
- destination_addon=payload['destination']['addon'],
- osf_support_email=settings.OSF_SUPPORT_EMAIL
+ source_path=payload["source"]["materialized"],
+ source_addon=payload["source"]["addon"],
+ destination_addon=payload["destination"]["addon"],
+ osf_support_email=settings.OSF_SUPPORT_EMAIL,
)
- if payload.get('errors'):
+ if payload.get("errors"):
# Action failed but our function succeeded
# Bail out to avoid file_signals
- return {'status': 'success'}
+ return {"status": "success"}
else:
node.create_waterbutler_log(auth, action, payload)
- metadata = payload.get('metadata') or payload.get('destination')
+ metadata = payload.get("metadata") or payload.get("destination")
- target_node = AbstractNode.load(metadata.get('nid'))
- if target_node and not target_node.is_quickfiles and payload['action'] != 'download_file':
+ target_node = AbstractNode.load(metadata.get("nid"))
+ if (
+ target_node
+ and not target_node.is_quickfiles
+ and payload["action"] != "download_file"
+ ):
update_storage_usage_with_size(payload)
with transaction.atomic():
- file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload)
+ file_signals.file_updated.send(
+ target=node, user=user, event_type=action, payload=payload
+ )
- return {'status': 'success'}
+ return {"status": "success"}
@file_signals.file_updated.connect
def addon_delete_file_node(self, target, user, event_type, payload):
- """ Get addon BaseFileNode(s), move it into the TrashedFileNode collection
+ """Get addon BaseFileNode(s), move it into the TrashedFileNode collection
and remove it from StoredFileNode.
Required so that the guids of deleted addon files are not re-pointed when an
addon file or folder is moved or renamed.
"""
- if event_type == 'file_removed' and payload.get('provider', None) != 'osfstorage':
- provider = payload['provider']
- path = payload['metadata']['path']
- materialized_path = payload['metadata']['materialized']
+ if (
+ event_type == "file_removed"
+ and payload.get("provider", None) != "osfstorage"
+ ):
+ provider = payload["provider"]
+ path = payload["metadata"]["path"]
+ materialized_path = payload["metadata"]["materialized"]
content_type = ContentType.objects.get_for_model(target)
- if path.endswith('/'):
- folder_children = BaseFileNode.resolve_class(provider, BaseFileNode.ANY).objects.filter(
+ if path.endswith("/"):
+ folder_children = BaseFileNode.resolve_class(
+ provider, BaseFileNode.ANY
+ ).objects.filter(
provider=provider,
target_object_id=target.id,
target_content_type=content_type,
- _materialized_path__startswith=materialized_path
+ _materialized_path__startswith=materialized_path,
)
for item in folder_children:
- if item.kind == 'file' and not TrashedFileNode.load(item._id):
+ if item.kind == "file" and not TrashedFileNode.load(item._id):
item.delete(user=user)
- elif item.kind == 'folder':
+ elif item.kind == "folder":
BaseFileNode.delete(item)
else:
try:
- file_node = BaseFileNode.resolve_class(provider, BaseFileNode.FILE).objects.get(
+ file_node = BaseFileNode.resolve_class(
+ provider, BaseFileNode.FILE
+ ).objects.get(
target_object_id=target.id,
target_content_type=content_type,
- _materialized_path=materialized_path
+ _materialized_path=materialized_path,
)
except BaseFileNode.DoesNotExist:
file_node = None
@@ -690,24 +770,25 @@ def addon_delete_file_node(self, target, user, event_type, payload):
def osfstoragefile_mark_viewed(self, auth, fileversion, file_node):
if auth.user:
# mark fileversion as seen
- FileVersionUserMetadata.objects.get_or_create(user=auth.user, file_version=fileversion)
+ FileVersionUserMetadata.objects.get_or_create(
+ user=auth.user, file_version=fileversion
+ )
@file_signals.file_viewed.connect
def osfstoragefile_update_view_analytics(self, auth, fileversion, file_node):
resource = file_node.target
enqueue_update_analytics(
- resource,
- file_node,
- fileversion.identifier,
- 'view'
+ resource, file_node, fileversion.identifier, "view"
)
@file_signals.file_viewed.connect
def osfstoragefile_viewed_update_metrics(self, auth, fileversion, file_node):
resource = file_node.target
- if waffle.switch_is_active(features.ELASTICSEARCH_METRICS) and isinstance(resource, Preprint):
+ if waffle.switch_is_active(features.ELASTICSEARCH_METRICS) and isinstance(
+ resource, Preprint
+ ):
try:
PreprintView.record_for_preprint(
preprint=resource,
@@ -720,17 +801,25 @@ def osfstoragefile_viewed_update_metrics(self, auth, fileversion, file_node):
@file_signals.file_downloaded.connect
-def osfstoragefile_downloaded_update_analytics(self, auth, fileversion, file_node):
+def osfstoragefile_downloaded_update_analytics(
+ self, auth, fileversion, file_node
+):
resource = file_node.target
if not resource.is_contributor_or_group_member(auth.user):
version_index = int(fileversion.identifier) - 1
- enqueue_update_analytics(resource, file_node, version_index, 'download')
+ enqueue_update_analytics(
+ resource, file_node, version_index, "download"
+ )
@file_signals.file_downloaded.connect
-def osfstoragefile_downloaded_update_metrics(self, auth, fileversion, file_node):
+def osfstoragefile_downloaded_update_metrics(
+ self, auth, fileversion, file_node
+):
resource = file_node.target
- if waffle.switch_is_active(features.ELASTICSEARCH_METRICS) and isinstance(resource, Preprint):
+ if waffle.switch_is_active(features.ELASTICSEARCH_METRICS) and isinstance(
+ resource, Preprint
+ ):
try:
PreprintDownload.record_for_preprint(
preprint=resource,
@@ -745,77 +834,84 @@ def osfstoragefile_downloaded_update_metrics(self, auth, fileversion, file_node)
@must_be_valid_project
def addon_view_or_download_file_legacy(**kwargs):
query_params = request.args.to_dict()
- node = kwargs.get('node') or kwargs['project']
+ node = kwargs.get("node") or kwargs["project"]
- action = query_params.pop('action', 'view')
- provider = kwargs.get('provider', 'osfstorage')
+ action = query_params.pop("action", "view")
+ provider = kwargs.get("provider", "osfstorage")
- if kwargs.get('path'):
- path = kwargs['path']
- elif kwargs.get('fid'):
- path = kwargs['fid']
+ if kwargs.get("path"):
+ path = kwargs["path"]
+ elif kwargs.get("fid"):
+ path = kwargs["fid"]
- if 'download' in request.path or request.path.startswith('/api/v1/'):
- action = 'download'
+ if "download" in request.path or request.path.startswith("/api/v1/"):
+ action = "download"
- if kwargs.get('vid'):
- query_params['version'] = kwargs['vid']
+ if kwargs.get("vid"):
+ query_params["version"] = kwargs["vid"]
# If provider is OSFstorage, check existence of requested file in the filetree
# This prevents invalid GUIDs from being created
- if provider == 'osfstorage':
- node_settings = node.get_addon('osfstorage')
+ if provider == "osfstorage":
+ node_settings = node.get_addon("osfstorage")
try:
path = node_settings.get_root().find_child_by_name(path)._id
except OsfStorageFileNode.DoesNotExist:
raise HTTPError(
- 404, data=dict(
- message_short='File not found',
- message_long='You requested a file that does not exist.'
- )
+ 404,
+ data=dict(
+ message_short="File not found",
+ message_long="You requested a file that does not exist.",
+ ),
)
return redirect(
node.web_url_for(
- 'addon_view_or_download_file',
+ "addon_view_or_download_file",
path=path,
provider=provider,
action=action,
- **query_params
+ **query_params,
),
- code=http_status.HTTP_301_MOVED_PERMANENTLY
+ code=http_status.HTTP_301_MOVED_PERMANENTLY,
)
+
@must_be_contributor_or_public
-def addon_deleted_file(auth, target, error_type='BLAME_PROVIDER', **kwargs):
- """Shows a nice error message to users when they try to view a deleted file
- """
+def addon_deleted_file(auth, target, error_type="BLAME_PROVIDER", **kwargs):
+ """Shows a nice error message to users when they try to view a deleted file"""
# Allow file_node to be passed in so other views can delegate to this one
- file_node = kwargs.get('file_node') or TrashedFileNode.load(kwargs.get('trashed_id'))
+ file_node = kwargs.get("file_node") or TrashedFileNode.load(
+ kwargs.get("trashed_id")
+ )
deleted_by, deleted_on, deleted = None, None, None
if isinstance(file_node, TrashedFileNode):
deleted_by = file_node.deleted_by
deleted_by_guid = file_node.deleted_by._id if deleted_by else None
- deleted_on = file_node.deleted_on.strftime('%c') + ' UTC'
+ deleted_on = file_node.deleted_on.strftime("%c") + " UTC"
deleted = deleted_on
- if getattr(file_node, 'suspended', False):
- error_type = 'FILE_SUSPENDED'
- elif file_node.deleted_by is None or (auth.private_key and auth.private_link.anonymous):
- if file_node.provider == 'osfstorage':
- error_type = 'FILE_GONE_ACTOR_UNKNOWN'
+ if getattr(file_node, "suspended", False):
+ error_type = "FILE_SUSPENDED"
+ elif file_node.deleted_by is None or (
+ auth.private_key and auth.private_link.anonymous
+ ):
+ if file_node.provider == "osfstorage":
+ error_type = "FILE_GONE_ACTOR_UNKNOWN"
else:
- error_type = 'BLAME_PROVIDER'
+ error_type = "BLAME_PROVIDER"
else:
- error_type = 'FILE_GONE'
+ error_type = "FILE_GONE"
else:
- error_type = 'DONT_KNOW'
+ error_type = "DONT_KNOW"
- file_path = kwargs.get('path', file_node.path)
+ file_path = kwargs.get("path", file_node.path)
file_name = file_node.name or os.path.basename(file_path)
file_name_title, file_name_ext = os.path.splitext(file_name)
- provider_full = settings.ADDONS_AVAILABLE_DICT[file_node.provider].full_name
+ provider_full = settings.ADDONS_AVAILABLE_DICT[
+ file_node.provider
+ ].full_name
try:
file_guid = file_node.get_guid()._id
except AttributeError:
@@ -823,48 +919,61 @@ def addon_deleted_file(auth, target, error_type='BLAME_PROVIDER', **kwargs):
format_params = dict(
file_name=markupsafe.escape(file_name),
- deleted_by=markupsafe.escape(getattr(deleted_by, 'fullname', None)),
+ deleted_by=markupsafe.escape(getattr(deleted_by, "fullname", None)),
deleted_on=markupsafe.escape(deleted_on),
provider=markupsafe.escape(provider_full),
- deleted=markupsafe.escape(deleted)
+ deleted=markupsafe.escape(deleted),
)
if deleted_by:
- format_params['deleted_by_guid'] = markupsafe.escape(deleted_by_guid)
+ format_params["deleted_by_guid"] = markupsafe.escape(deleted_by_guid)
error_msg = ERROR_MESSAGES[error_type].format(**format_params)
if isinstance(target, AbstractNode):
- error_msg += format_last_known_metadata(auth, target, file_node, error_type)
+ error_msg += format_last_known_metadata(
+ auth, target, file_node, error_type
+ )
ret = serialize_node(target, auth, primary=True)
ret.update(rubeus.collect_addon_assets(target))
- ret.update({
- 'error': error_msg,
- 'urls': {
- 'render': None,
- 'sharejs': None,
- 'mfr': get_mfr_url(target, file_node.provider),
- 'profile_image': get_profile_image_url(auth.user, 25),
- 'files': target.web_url_for('collect_file_trees'),
- },
- 'extra': {},
- 'size': 9966699, # Prevent file from being edited, just in case
- 'sharejs_uuid': None,
- 'file_name': file_name,
- 'file_path': file_path,
- 'file_name_title': file_name_title,
- 'file_name_ext': file_name_ext,
- 'target_deleted': getattr(target, 'is_deleted', False),
- 'version_id': None,
- 'file_guid': file_guid,
- 'file_id': file_node._id,
- 'provider': file_node.provider,
- 'materialized_path': file_node.materialized_path or file_path,
- 'private': getattr(target.get_addon(file_node.provider), 'is_private', False),
- 'file_tags': list(file_node.tags.filter(system=False).values_list('name', flat=True)) if not file_node._state.adding else [], # Only access ManyRelatedManager if saved
- 'allow_comments': file_node.provider in settings.ADDONS_COMMENTABLE,
- })
+ ret.update(
+ {
+ "error": error_msg,
+ "urls": {
+ "render": None,
+ "sharejs": None,
+ "mfr": get_mfr_url(target, file_node.provider),
+ "profile_image": get_profile_image_url(auth.user, 25),
+ "files": target.web_url_for("collect_file_trees"),
+ },
+ "extra": {},
+ "size": 9966699, # Prevent file from being edited, just in case
+ "sharejs_uuid": None,
+ "file_name": file_name,
+ "file_path": file_path,
+ "file_name_title": file_name_title,
+ "file_name_ext": file_name_ext,
+ "target_deleted": getattr(target, "is_deleted", False),
+ "version_id": None,
+ "file_guid": file_guid,
+ "file_id": file_node._id,
+ "provider": file_node.provider,
+ "materialized_path": file_node.materialized_path or file_path,
+ "private": getattr(
+ target.get_addon(file_node.provider), "is_private", False
+ ),
+ "file_tags": list(
+ file_node.tags.filter(system=False).values_list(
+ "name", flat=True
+ )
+ )
+ if not file_node._state.adding
+ else [], # Only access ManyRelatedManager if saved
+ "allow_comments": file_node.provider
+ in settings.ADDONS_COMMENTABLE,
+ }
+ )
else:
# TODO - serialize deleted metadata for future types of deleted file targets
- ret = {'error': error_msg}
+ ret = {"error": error_msg}
return ret, http_status.HTTP_410_GONE
@@ -872,11 +981,11 @@ def addon_deleted_file(auth, target, error_type='BLAME_PROVIDER', **kwargs):
@must_be_contributor_or_public
def addon_view_or_download_file(auth, path, provider, **kwargs):
extras = request.args.to_dict()
- extras.pop('_', None) # Clean up our url params a bit
- action = extras.get('action', 'view')
- guid = kwargs.get('guid')
- guid_target = getattr(Guid.load(guid), 'referent', None)
- target = guid_target or kwargs.get('node') or kwargs['project']
+ extras.pop("_", None) # Clean up our url params a bit
+ action = extras.get("action", "view")
+ guid = kwargs.get("guid")
+ guid_target = getattr(Guid.load(guid), "referent", None)
+ target = guid_target or kwargs.get("node") or kwargs["project"]
provider_safe = markupsafe.escape(provider)
path_safe = markupsafe.escape(path)
@@ -884,73 +993,76 @@ def addon_view_or_download_file(auth, path, provider, **kwargs):
if not path:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
- if hasattr(target, 'get_addon'):
-
+ if hasattr(target, "get_addon"):
node_addon = target.get_addon(provider)
if not isinstance(node_addon, BaseStorageAddon):
- object_text = markupsafe.escape(getattr(target, 'project_or_component', 'this object'))
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
- 'message_short': 'Bad Request',
- 'message_long': f'The {provider_safe} add-on containing {path_safe} is no longer connected to {object_text}.'
- })
+ object_text = markupsafe.escape(
+ getattr(target, "project_or_component", "this object")
+ )
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={
+ "message_short": "Bad Request",
+ "message_long": f"The {provider_safe} add-on containing {path_safe} is no longer connected to {object_text}.",
+ },
+ )
if not node_addon.has_auth:
- raise HTTPError(http_status.HTTP_401_UNAUTHORIZED, data={
- 'message_short': 'Unauthorized',
- 'message_long': f'The {provider_safe} add-on containing {path_safe} is no longer authorized.'
- })
+ raise HTTPError(
+ http_status.HTTP_401_UNAUTHORIZED,
+ data={
+ "message_short": "Unauthorized",
+ "message_long": f"The {provider_safe} add-on containing {path_safe} is no longer authorized.",
+ },
+ )
if not node_addon.complete:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
- 'message_short': 'Bad Request',
- 'message_long': f'The {provider_safe} add-on containing {path_safe} is no longer configured.'
- })
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={
+ "message_short": "Bad Request",
+ "message_long": f"The {provider_safe} add-on containing {path_safe} is no longer configured.",
+ },
+ )
savepoint_id = transaction.savepoint()
try:
file_node = BaseFileNode.resolve_class(
provider, BaseFileNode.FILE
- ).get_or_create(
- target, path, **extras
- )
+ ).get_or_create(target, path, **extras)
except addon_errors.QueryError as e:
raise HTTPError(
http_status.HTTP_400_BAD_REQUEST,
- data={
- 'message_short': 'Bad Request',
- 'message_long': str(e)
- }
+ data={"message_short": "Bad Request", "message_long": str(e)},
)
except addon_errors.DoesNotExist as e:
raise HTTPError(
http_status.HTTP_404_NOT_FOUND,
- data={
- 'message_short': 'Not Found',
- 'message_long': str(e)
- }
+ data={"message_short": "Not Found", "message_long": str(e)},
)
# Note: Cookie is provided for authentication to waterbutler
# it is overridden to force authentication as the current user
# the auth header is also pass to support basic auth
version = file_node.touch(
- request.headers.get('Authorization'),
- **dict(
- extras,
- cookie=request.cookies.get(settings.COOKIE_NAME)
- )
+ request.headers.get("Authorization"),
+ **dict(extras, cookie=request.cookies.get(settings.COOKIE_NAME)),
)
# There's no download action redirect to the Ember front-end file view and create guid.
- if action != 'download':
- if isinstance(target, Node) and waffle.flag_is_active(request, features.EMBER_FILE_PROJECT_DETAIL):
+ if action != "download":
+ if isinstance(target, Node) and waffle.flag_is_active(
+ request, features.EMBER_FILE_PROJECT_DETAIL
+ ):
guid = file_node.get_guid(create=True)
- return redirect(f'{settings.DOMAIN}{guid._id}/')
- if isinstance(target, Registration) and waffle.flag_is_active(request, features.EMBER_FILE_REGISTRATION_DETAIL):
+ return redirect(f"{settings.DOMAIN}{guid._id}/")
+ if isinstance(target, Registration) and waffle.flag_is_active(
+ request, features.EMBER_FILE_REGISTRATION_DETAIL
+ ):
guid = file_node.get_guid(create=True)
- return redirect(f'{settings.DOMAIN}{guid._id}/')
+ return redirect(f"{settings.DOMAIN}{guid._id}/")
if version is None:
# File is either deleted or unable to be found in the provider location
@@ -960,86 +1072,140 @@ def addon_view_or_download_file(auth, path, provider, **kwargs):
file_node = BaseFileNode.load(path)
if not file_node:
- raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={
- 'message_short': 'File Not Found',
- 'message_long': 'The requested file could not be found.'
- })
+ raise HTTPError(
+ http_status.HTTP_404_NOT_FOUND,
+ data={
+ "message_short": "File Not Found",
+ "message_long": "The requested file could not be found.",
+ },
+ )
- if file_node.kind == 'folder':
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
- 'message_short': 'Bad Request',
- 'message_long': 'You cannot request a folder from this endpoint.'
- })
+ if file_node.kind == "folder":
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={
+ "message_short": "Bad Request",
+ "message_long": "You cannot request a folder from this endpoint.",
+ },
+ )
# Allow osfstorage to redirect if the deep url can be used to find a valid file_node
- if file_node.provider == 'osfstorage' and not file_node.is_deleted:
+ if file_node.provider == "osfstorage" and not file_node.is_deleted:
return redirect(
- file_node.target.web_url_for('addon_view_or_download_file', path=file_node._id, provider=file_node.provider)
+ file_node.target.web_url_for(
+ "addon_view_or_download_file",
+ path=file_node._id,
+ provider=file_node.provider,
+ )
)
- return addon_deleted_file(target=target, file_node=file_node, path=path, **kwargs)
+ return addon_deleted_file(
+ target=target, file_node=file_node, path=path, **kwargs
+ )
else:
transaction.savepoint_commit(savepoint_id)
# TODO clean up these urls and unify what is used as a version identifier
- if request.method == 'HEAD':
- return make_response(('', http_status.HTTP_302_FOUND, {
- 'Location': file_node.generate_waterbutler_url(**dict(extras, direct=None, version=version.identifier, _internal=extras.get('mode') == 'render'))
- }))
+ if request.method == "HEAD":
+ return make_response(
+ (
+ "",
+ http_status.HTTP_302_FOUND,
+ {
+ "Location": file_node.generate_waterbutler_url(
+ **dict(
+ extras,
+ direct=None,
+ version=version.identifier,
+ _internal=extras.get("mode") == "render",
+ )
+ )
+ },
+ )
+ )
- if action == 'download':
- format = extras.get('format')
+ if action == "download":
+ format = extras.get("format")
_, extension = os.path.splitext(file_node.name)
# avoid rendering files with the same format type.
- if format and f'.{format.lower()}' != extension.lower():
- return redirect('{}/export?format={}&url={}'.format(get_mfr_url(target, provider), format, quote(file_node.generate_waterbutler_url(
- **dict(extras, direct=None, version=version.identifier, _internal=extras.get('mode') == 'render')
- ))))
- return redirect(file_node.generate_waterbutler_url(**dict(extras, direct=None, version=version.identifier, _internal=extras.get('mode') == 'render')))
-
- if action == 'get_guid':
- draft_id = extras.get('draft')
+ if format and f".{format.lower()}" != extension.lower():
+ return redirect(
+ "{}/export?format={}&url={}".format(
+ get_mfr_url(target, provider),
+ format,
+ quote(
+ file_node.generate_waterbutler_url(
+ **dict(
+ extras,
+ direct=None,
+ version=version.identifier,
+ _internal=extras.get("mode") == "render",
+ )
+ )
+ ),
+ )
+ )
+ return redirect(
+ file_node.generate_waterbutler_url(
+ **dict(
+ extras,
+ direct=None,
+ version=version.identifier,
+ _internal=extras.get("mode") == "render",
+ )
+ )
+ )
+
+ if action == "get_guid":
+ draft_id = extras.get("draft")
draft = DraftRegistration.load(draft_id)
if draft is None:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
- 'message_short': 'Bad Request',
- 'message_long': 'File not associated with required object.'
- })
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={
+ "message_short": "Bad Request",
+ "message_long": "File not associated with required object.",
+ },
+ )
guid = file_node.get_guid(create=True)
guid.referent.save()
return dict(guid=guid._id)
- if len(request.path.strip('/').split('/')) > 1:
+ if len(request.path.strip("/").split("/")) > 1:
guid = file_node.get_guid(create=True)
# NOTE: furl encoding to be verified later
- return redirect(furl(f'/{guid._id}/', args=extras).url)
+ return redirect(furl(f"/{guid._id}/", args=extras).url)
if isinstance(target, Preprint):
# Redirecting preprint file guids to the preprint detail page
- return redirect(f'/{target._id}/')
+ return redirect(f"/{target._id}/")
return addon_view_file(auth, target, file_node, version)
@collect_auth
def persistent_file_download(auth, **kwargs):
- id_or_guid = kwargs.get('fid_or_guid')
+ id_or_guid = kwargs.get("fid_or_guid")
file = BaseFileNode.active.filter(_id=id_or_guid).first()
if not file:
guid = Guid.load(id_or_guid)
if guid:
file = guid.referent
else:
- raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={
- 'message_short': 'File Not Found',
- 'message_long': 'The requested file could not be found.'
- })
+ raise HTTPError(
+ http_status.HTTP_404_NOT_FOUND,
+ data={
+ "message_short": "File Not Found",
+ "message_long": "The requested file could not be found.",
+ },
+ )
if not file.is_file:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
- 'message_long': 'Downloading folders is not permitted.'
- })
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={"message_long": "Downloading folders is not permitted."},
+ )
- auth_redirect = check_contributor_auth(file.target, auth,
- include_public=True,
- include_view_only_anon=True)
+ auth_redirect = check_contributor_auth(
+ file.target, auth, include_public=True, include_view_only_anon=True
+ )
if auth_redirect:
return auth_redirect
@@ -1047,19 +1213,23 @@ def persistent_file_download(auth, **kwargs):
return redirect(
file.generate_waterbutler_url(**query_params),
- code=http_status.HTTP_302_FOUND
+ code=http_status.HTTP_302_FOUND,
)
def addon_view_or_download_quickfile(**kwargs):
- fid = kwargs.get('fid', 'NOT_AN_FID')
+ fid = kwargs.get("fid", "NOT_AN_FID")
file_ = OsfStorageFile.load(fid)
if not file_:
- raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={
- 'message_short': 'File Not Found',
- 'message_long': 'The requested file could not be found.'
- })
- return proxy_url(f'/project/{file_.target._id}/files/osfstorage/{fid}/')
+ raise HTTPError(
+ http_status.HTTP_404_NOT_FOUND,
+ data={
+ "message_short": "File Not Found",
+ "message_long": "The requested file could not be found.",
+ },
+ )
+ return proxy_url(f"/project/{file_.target._id}/files/osfstorage/{fid}/")
+
def addon_view_file(auth, node, file_node, version):
# TODO: resolve circular import issue
@@ -1067,18 +1237,22 @@ def addon_view_file(auth, node, file_node, version):
if isinstance(version, tuple):
version, error = version
- error = error.replace('\n', '').strip()
+ error = error.replace("\n", "").strip()
else:
error = None
ret = serialize_node(node, auth, primary=True)
- if file_node._id + '-' + version._id not in node.file_guid_to_share_uuids:
- node.file_guid_to_share_uuids[file_node._id + '-' + version._id] = uuid.uuid4()
+ if file_node._id + "-" + version._id not in node.file_guid_to_share_uuids:
+ node.file_guid_to_share_uuids[file_node._id + "-" + version._id] = (
+ uuid.uuid4()
+ )
node.save()
- if ret['user']['can_edit']:
- sharejs_uuid = str(node.file_guid_to_share_uuids[file_node._id + '-' + version._id])
+ if ret["user"]["can_edit"]:
+ sharejs_uuid = str(
+ node.file_guid_to_share_uuids[file_node._id + "-" + version._id]
+ )
else:
sharejs_uuid = None
@@ -1086,54 +1260,70 @@ def addon_view_file(auth, node, file_node, version):
download_url = furl(
request.url,
netloc=internal_furl.netloc,
- args=dict(request.args, **{
- 'direct': None,
- 'mode': 'render',
- 'action': 'download',
- 'public_file': node.is_public,
- })
+ args=dict(
+ request.args,
+ **{
+ "direct": None,
+ "mode": "render",
+ "action": "download",
+ "public_file": node.is_public,
+ },
+ ),
)
mfr_url = get_mfr_url(node, file_node.provider)
# NOTE: furl encoding to be verified later
- render_url = furl(
- mfr_url,
- path=['render'],
- args={'url': download_url.url}
+ render_url = furl(mfr_url, path=["render"], args={"url": download_url.url})
+
+ version_names = (
+ BaseFileVersionsThrough.objects.filter(basefilenode_id=file_node.id)
+ .order_by("-fileversion_id")
+ .values_list("version_name", flat=True)
)
- version_names = BaseFileVersionsThrough.objects.filter(
- basefilenode_id=file_node.id
- ).order_by('-fileversion_id').values_list('version_name', flat=True)
-
- ret.update({
- 'urls': {
- 'render': render_url.url,
- 'mfr': mfr_url,
- 'sharejs': wiki_settings.SHAREJS_URL,
- 'profile_image': get_profile_image_url(auth.user, 25),
- 'files': node.web_url_for('collect_file_trees'),
- 'archived_from': get_archived_from_url(node, file_node) if node.is_registration else None,
- },
- 'error': error,
- 'file_name': file_node.name,
- 'file_name_title': os.path.splitext(file_node.name)[0],
- 'file_name_ext': os.path.splitext(file_node.name)[1],
- 'version_id': version.identifier,
- 'file_path': file_node.path,
- 'sharejs_uuid': sharejs_uuid,
- 'provider': file_node.provider,
- 'materialized_path': file_node.materialized_path,
- 'extra': version.metadata.get('extra', {}),
- 'size': version.size if version.size is not None else 9966699,
- 'private': getattr(node.get_addon(file_node.provider), 'is_private', False),
- 'file_tags': list(file_node.tags.filter(system=False).values_list('name', flat=True)) if not file_node._state.adding else [], # Only access ManyRelatedManager if saved
- 'file_guid': file_node.get_guid()._id,
- 'file_id': file_node._id,
- 'allow_comments': file_node.provider in settings.ADDONS_COMMENTABLE,
- 'checkout_user': file_node.checkout._id if file_node.checkout else None,
- 'version_names': list(version_names)
- })
+ ret.update(
+ {
+ "urls": {
+ "render": render_url.url,
+ "mfr": mfr_url,
+ "sharejs": wiki_settings.SHAREJS_URL,
+ "profile_image": get_profile_image_url(auth.user, 25),
+ "files": node.web_url_for("collect_file_trees"),
+ "archived_from": get_archived_from_url(node, file_node)
+ if node.is_registration
+ else None,
+ },
+ "error": error,
+ "file_name": file_node.name,
+ "file_name_title": os.path.splitext(file_node.name)[0],
+ "file_name_ext": os.path.splitext(file_node.name)[1],
+ "version_id": version.identifier,
+ "file_path": file_node.path,
+ "sharejs_uuid": sharejs_uuid,
+ "provider": file_node.provider,
+ "materialized_path": file_node.materialized_path,
+ "extra": version.metadata.get("extra", {}),
+ "size": version.size if version.size is not None else 9966699,
+ "private": getattr(
+ node.get_addon(file_node.provider), "is_private", False
+ ),
+ "file_tags": list(
+ file_node.tags.filter(system=False).values_list(
+ "name", flat=True
+ )
+ )
+ if not file_node._state.adding
+ else [], # Only access ManyRelatedManager if saved
+ "file_guid": file_node.get_guid()._id,
+ "file_id": file_node._id,
+ "allow_comments": file_node.provider
+ in settings.ADDONS_COMMENTABLE,
+ "checkout_user": file_node.checkout._id
+ if file_node.checkout
+ else None,
+ "version_names": list(version_names),
+ }
+ )
ret.update(rubeus.collect_addon_assets(node))
return ret
@@ -1143,5 +1333,9 @@ def get_archived_from_url(node, file_node):
if file_node.copied_from:
trashed = TrashedFileNode.load(file_node.copied_from._id)
if not trashed:
- return node.registered_from.web_url_for('addon_view_or_download_file', provider=file_node.provider, path=file_node.copied_from._id)
+ return node.registered_from.web_url_for(
+ "addon_view_or_download_file",
+ provider=file_node.provider,
+ path=file_node.copied_from._id,
+ )
return None
diff --git a/addons/bitbucket/api.py b/addons/bitbucket/api.py
index 4f1176d9a9d..6a453ca8c6f 100644
--- a/addons/bitbucket/api.py
+++ b/addons/bitbucket/api.py
@@ -8,19 +8,18 @@
class BitbucketClient(BaseClient):
-
def __init__(self, access_token=None):
self.access_token = ensure_str(access_token)
@property
def _default_headers(self):
if self.access_token:
- return {'Authorization': f'Bearer {self.access_token}'}
+ return {"Authorization": f"Bearer {self.access_token}"}
return {}
@property
def username(self):
- return self.user()['username']
+ return self.user()["username"]
def user(self):
"""Fetch the user identified by ``self.access_token``.
@@ -41,10 +40,10 @@ def user(self):
:return: a metadata object representing the user
"""
res = self._make_request(
- 'GET',
- self._build_url(settings.BITBUCKET_V2_API_URL, 'user'),
- expects=(200, ),
- throws=HTTPError(401)
+ "GET",
+ self._build_url(settings.BITBUCKET_V2_API_URL, "user"),
+ expects=(200,),
+ throws=HTTPError(401),
)
return res.json()
@@ -60,10 +59,15 @@ def repo(self, user, repo):
:return: Dict of repo information
"""
res = self._make_request(
- 'GET',
- self._build_url(settings.BITBUCKET_V2_API_URL, 'repositories', user, repo),
- expects=(200, 404, ),
- throws=HTTPError(401)
+ "GET",
+ self._build_url(
+ settings.BITBUCKET_V2_API_URL, "repositories", user, repo
+ ),
+ expects=(
+ 200,
+ 404,
+ ),
+ throws=HTTPError(401),
)
return None if res.status_code == 404 else res.json()
@@ -78,18 +82,18 @@ def repos(self):
:return: list of repository objects
"""
query_params = {
- 'role': 'contributor',
- 'pagelen': 100,
- 'fields': 'values.full_name'
+ "role": "contributor",
+ "pagelen": 100,
+ "fields": "values.full_name",
}
res = self._make_request(
- 'GET',
- self._build_url(settings.BITBUCKET_V2_API_URL, 'repositories'),
- expects=(200, ),
+ "GET",
+ self._build_url(settings.BITBUCKET_V2_API_URL, "repositories"),
+ expects=(200,),
throws=HTTPError(401),
- params=query_params
+ params=query_params,
)
- repo_list = res.json()['values']
+ repo_list = res.json()["values"]
return repo_list
@@ -107,29 +111,31 @@ def team_repos(self):
"""
query_params = {
- 'role': 'contributor',
- 'pagelen': 100,
- 'fields': 'values.links.repositories.href'
+ "role": "contributor",
+ "pagelen": 100,
+ "fields": "values.links.repositories.href",
}
res = self._make_request(
- 'GET',
- self._build_url(settings.BITBUCKET_V2_API_URL, 'teams'),
- expects=(200, ),
+ "GET",
+ self._build_url(settings.BITBUCKET_V2_API_URL, "teams"),
+ expects=(200,),
throws=HTTPError(401),
- params=query_params
+ params=query_params,
)
- team_repos_url_list = [x['links']['repositories']['href'] for x in res.json()['values']]
+ team_repos_url_list = [
+ x["links"]["repositories"]["href"] for x in res.json()["values"]
+ ]
team_repos = []
for team_repos_url in team_repos_url_list:
res = self._make_request(
- 'GET',
+ "GET",
team_repos_url,
- expects=(200, ),
+ expects=(200,),
throws=HTTPError(401),
- params={'fields': 'values.full_name'}
+ params={"fields": "values.full_name"},
)
- team_repos.extend(res.json()['values'])
+ team_repos.extend(res.json()["values"])
return team_repos
@@ -147,12 +153,14 @@ def repo_default_branch(self, user, repo):
"""
res = self._make_request(
- 'GET',
- self._build_url(settings.BITBUCKET_V2_API_URL, 'repositories', user, repo),
- expects=(200, ),
- throws=HTTPError(401)
+ "GET",
+ self._build_url(
+ settings.BITBUCKET_V2_API_URL, "repositories", user, repo
+ ),
+ expects=(200,),
+ throws=HTTPError(401),
)
- return res.json()['mainbranch']['name']
+ return res.json()["mainbranch"]["name"]
def branches(self, user, repo):
"""List a repo's branches. This endpoint is paginated and may require
@@ -167,29 +175,34 @@ def branches(self, user, repo):
:return: List of branch dicts
"""
branches = []
- url = self._build_url(settings.BITBUCKET_V2_API_URL, 'repositories', user, repo, 'refs', 'branches')
+ url = self._build_url(
+ settings.BITBUCKET_V2_API_URL,
+ "repositories",
+ user,
+ repo,
+ "refs",
+ "branches",
+ )
while True:
res = self._make_request(
- 'GET',
- url,
- expects=(200, ),
- throws=HTTPError(401)
+ "GET", url, expects=(200,), throws=HTTPError(401)
)
res_data = res.json()
- branches.extend(res_data['values'])
- url = res_data.get('next', None)
+ branches.extend(res_data["values"])
+ url = res_data.get("next", None)
if not url:
break
return branches
def ref_to_params(branch=None, sha=None):
-
- params = urlencode({
- key: value
- for key, value in {'branch': branch, 'sha': sha}.items()
- if value
- })
+ params = urlencode(
+ {
+ key: value
+ for key, value in {"branch": branch, "sha": sha}.items()
+ if value
+ }
+ )
if params:
- return '?' + params
- return ''
+ return "?" + params
+ return ""
diff --git a/addons/bitbucket/apps.py b/addons/bitbucket/apps.py
index b03cce6f32e..1bc7dd37ad7 100644
--- a/addons/bitbucket/apps.py
+++ b/addons/bitbucket/apps.py
@@ -11,83 +11,92 @@
def bitbucket_hgrid_data(node_settings, auth, **kwargs):
-
# Quit if no repo linked
if not node_settings.complete:
return
- connection = BitbucketClient(access_token=node_settings.external_account.oauth_key)
+ connection = BitbucketClient(
+ access_token=node_settings.external_account.oauth_key
+ )
node = node_settings.owner
if node.is_public and not node.is_contributor_or_group_member(auth.user):
-
repo = connection.repo(node_settings.user, node_settings.repo)
if not repo:
# TODO: Add warning message
- logger.error('Could not access Bitbucket repo')
+ logger.error("Could not access Bitbucket repo")
return None
try:
branch, sha, branches = get_refs(
node_settings,
- branch=kwargs.get('branch'),
- sha=kwargs.get('sha'),
+ branch=kwargs.get("branch"),
+ sha=kwargs.get("sha"),
connection=connection,
)
except (NotFoundError, Exception):
# TODO: Show an alert or change Bitbucket configuration?
- logger.error('Bitbucket repo not found')
+ logger.error("Bitbucket repo not found")
return
ref = None if branch is None else ref_to_params(branch, sha)
- name_tpl = '{user}/{repo}'.format(
+ name_tpl = "{user}/{repo}".format(
user=node_settings.user, repo=node_settings.repo
)
permissions = {
- 'edit': False,
- 'view': True,
- 'private': node_settings.is_private
+ "edit": False,
+ "view": True,
+ "private": node_settings.is_private,
}
urls = {
- 'upload': None,
- 'fetch': node_settings.owner.api_url + 'bitbucket/hgrid/' + (ref or ''),
- 'branch': node_settings.owner.api_url + 'bitbucket/hgrid/root/',
- 'zip': node_settings.owner.api_url + 'bitbucket/zipball/' + (ref or ''),
- 'repo': f'https://bitbucket.com/{node_settings.user}/{node_settings.repo}/branch/'
+ "upload": None,
+ "fetch": node_settings.owner.api_url
+ + "bitbucket/hgrid/"
+ + (ref or ""),
+ "branch": node_settings.owner.api_url + "bitbucket/hgrid/root/",
+ "zip": node_settings.owner.api_url
+ + "bitbucket/zipball/"
+ + (ref or ""),
+ "repo": f"https://bitbucket.com/{node_settings.user}/{node_settings.repo}/branch/",
}
- branch_names = [each['name'] for each in branches]
+ branch_names = [each["name"] for each in branches]
if not branch_names:
- branch_names = [branch] # if repo un-init-ed then still add default branch to list of branches
-
- return [rubeus.build_addon_root(
- node_settings,
- name_tpl,
- urls=urls,
- permissions=permissions,
- branches=branch_names,
- defaultBranch=branch,
- private_key=kwargs.get('view_only', None),
- )]
+ branch_names = [
+ branch
+ ] # if repo un-init-ed then still add default branch to list of branches
+
+ return [
+ rubeus.build_addon_root(
+ node_settings,
+ name_tpl,
+ urls=urls,
+ permissions=permissions,
+ branches=branch_names,
+ defaultBranch=branch,
+ private_key=kwargs.get("view_only", None),
+ )
+ ]
+
HERE = os.path.dirname(os.path.abspath(__file__))
NODE_SETTINGS_TEMPLATE = os.path.join(
HERE,
- 'templates',
- 'bitbucket_node_settings.mako',
+ "templates",
+ "bitbucket_node_settings.mako",
)
-class BitbucketAddonConfig(BaseAddonAppConfig):
+class BitbucketAddonConfig(BaseAddonAppConfig):
default = True
- name = 'addons.bitbucket'
- label = 'addons_bitbucket'
- full_name = 'Bitbucket'
- short_name = 'bitbucket'
- configs = ['accounts', 'node']
- categories = ['storage']
- owners = ['user', 'node']
+ name = "addons.bitbucket"
+ label = "addons_bitbucket"
+ full_name = "Bitbucket"
+ short_name = "bitbucket"
+ configs = ["accounts", "node"]
+ categories = ["storage"]
+ owners = ["user", "node"]
has_hgrid_files = True
node_settings_template = NODE_SETTINGS_TEMPLATE
@@ -95,14 +104,14 @@ class BitbucketAddonConfig(BaseAddonAppConfig):
def get_hgrid_data(self):
return bitbucket_hgrid_data
- FILE_ADDED = 'bitbucket_file_added'
- FILE_REMOVED = 'bitbucket_file_removed'
- FILE_UPDATED = 'bitbucket_file_updated'
- FOLDER_CREATED = 'bitbucket_folder_created'
- NODE_AUTHORIZED = 'bitbucket_node_authorized'
- NODE_DEAUTHORIZED = 'bitbucket_node_deauthorized'
- NODE_DEAUTHORIZED_NO_USER = 'bitbucket_node_deauthorized_no_user'
- REPO_LINKED = 'bitbucket_repo_linked'
+ FILE_ADDED = "bitbucket_file_added"
+ FILE_REMOVED = "bitbucket_file_removed"
+ FILE_UPDATED = "bitbucket_file_updated"
+ FOLDER_CREATED = "bitbucket_folder_created"
+ NODE_AUTHORIZED = "bitbucket_node_authorized"
+ NODE_DEAUTHORIZED = "bitbucket_node_deauthorized"
+ NODE_DEAUTHORIZED_NO_USER = "bitbucket_node_deauthorized_no_user"
+ REPO_LINKED = "bitbucket_repo_linked"
actions = (
FILE_ADDED,
@@ -112,17 +121,19 @@ def get_hgrid_data(self):
NODE_AUTHORIZED,
NODE_DEAUTHORIZED,
NODE_DEAUTHORIZED_NO_USER,
- REPO_LINKED)
+ REPO_LINKED,
+ )
@property
def routes(self):
from . import routes
+
return [routes.api_routes]
@property
def user_settings(self):
- return self.get_model('UserSettings')
+ return self.get_model("UserSettings")
@property
def node_settings(self):
- return self.get_model('NodeSettings')
+ return self.get_model("NodeSettings")
diff --git a/addons/bitbucket/models.py b/addons/bitbucket/models.py
index 50ebc39a2de..61eb79af714 100644
--- a/addons/bitbucket/models.py
+++ b/addons/bitbucket/models.py
@@ -6,7 +6,8 @@
from addons.base.models import (
BaseOAuthNodeSettings,
BaseOAuthUserSettings,
- BaseStorageAddon,)
+ BaseStorageAddon,
+)
from addons.bitbucket.api import BitbucketClient
from addons.bitbucket.serializer import BitbucketSerializer
from addons.bitbucket import settings as bitbucket_settings
@@ -21,7 +22,7 @@
class BitbucketFileNode(BaseFileNode):
- _provider = 'bitbucket'
+ _provider = "bitbucket"
class BitbucketFolder(BitbucketFileNode, Folder):
@@ -29,19 +30,22 @@ class BitbucketFolder(BitbucketFileNode, Folder):
class BitbucketFile(BitbucketFileNode, File):
- version_identifier = 'commitSha'
+ version_identifier = "commitSha"
- def touch(self, auth_header, revision=None, commitSha=None, branch=None, **kwargs):
+ def touch(
+ self, auth_header, revision=None, commitSha=None, branch=None, **kwargs
+ ):
revision = revision or commitSha or branch
return super().touch(auth_header, revision=revision, **kwargs)
@property
def _hashes(self):
try:
- return {'commit': self._history[-1]['extra']['commitSha']}
+ return {"commit": self._history[-1]["extra"]["commitSha"]}
except (IndexError, KeyError):
return None
+
class BitbucketProvider(ExternalProvider):
"""Provider to handler Bitbucket OAuth workflow
@@ -53,8 +57,8 @@ class BitbucketProvider(ExternalProvider):
"""
- name = 'Bitbucket'
- short_name = 'bitbucket'
+ name = "Bitbucket"
+ short_name = "bitbucket"
client_id = bitbucket_settings.CLIENT_ID
client_secret = bitbucket_settings.CLIENT_SECRET
@@ -72,13 +76,13 @@ def handle_callback(self, response):
record to the user and saves the account info.
"""
- client = BitbucketClient(access_token=response['access_token'])
+ client = BitbucketClient(access_token=response["access_token"])
user_info = client.user()
return {
- 'provider_id': user_info['uuid'],
- 'profile_url': user_info['links']['html']['href'],
- 'display_name': user_info['username']
+ "provider_id": user_info["uuid"],
+ "profile_url": user_info["links"]["html"]["href"],
+ "display_name": user_info["username"],
}
def fetch_access_token(self, force_refresh=False):
@@ -94,6 +98,7 @@ class UserSettings(BaseOAuthUserSettings):
* Bitbucket does not support remote revocation of access tokens.
"""
+
oauth_provider = BitbucketProvider
serializer = BitbucketSerializer
@@ -101,7 +106,9 @@ class UserSettings(BaseOAuthUserSettings):
# Assumes oldest connected account is primary.
@property
def public_id(self):
- bitbucket_accounts = self.owner.external_accounts.filter(provider=self.oauth_provider.short_name)
+ bitbucket_accounts = self.owner.external_accounts.filter(
+ provider=self.oauth_provider.short_name
+ )
if bitbucket_accounts:
return bitbucket_accounts[0].display_name
return None
@@ -114,7 +121,9 @@ class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
user = models.TextField(blank=True, null=True)
repo = models.TextField(blank=True, null=True)
hook_id = models.TextField(blank=True, null=True)
- user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
+ user_settings = models.ForeignKey(
+ UserSettings, null=True, blank=True, on_delete=models.CASCADE
+ )
_api = None
@@ -132,7 +141,7 @@ def folder_id(self):
@property
def folder_name(self):
if self.complete:
- return f'{self.user}/{self.repo}'
+ return f"{self.user}/{self.repo}"
return None
@property
@@ -141,15 +150,17 @@ def folder_path(self):
@property
def complete(self):
- return self.has_auth and self.repo is not None and self.user is not None
+ return (
+ self.has_auth and self.repo is not None and self.user is not None
+ )
def authorize(self, user_settings, save=False):
self.user_settings = user_settings
self.owner.add_log(
- action='bitbucket_node_authorized',
+ action="bitbucket_node_authorized",
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
},
auth=Auth(user_settings.owner),
)
@@ -166,10 +177,10 @@ def deauthorize(self, auth=None, log=True):
self.clear_settings()
if log:
self.owner.add_log(
- action='bitbucket_node_deauthorized',
+ action="bitbucket_node_deauthorized",
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
},
auth=auth,
)
@@ -185,22 +196,24 @@ def delete(self, save=False):
@property
def repo_url(self):
if self.user and self.repo:
- return f'https://bitbucket.org/{self.user}/{self.repo}/'
+ return f"https://bitbucket.org/{self.user}/{self.repo}/"
@property
def short_url(self):
if self.user and self.repo:
- return '/'.join([self.user, self.repo])
+ return "/".join([self.user, self.repo])
@property
def is_private(self):
repo = self.fetch_repo()
if repo:
- return repo['is_private']
+ return repo["is_private"]
return None
def fetch_repo(self):
- connection = BitbucketClient(access_token=self.api.fetch_access_token())
+ connection = BitbucketClient(
+ access_token=self.api.fetch_access_token()
+ )
return connection.repo(user=self.user, repo=self.repo)
def fetch_access_token(self):
@@ -209,20 +222,23 @@ def fetch_access_token(self):
# TODO: Delete me and replace with serialize_settings / Knockout
def to_json(self, user):
ret = super().to_json(user)
- user_settings = user.get_addon('bitbucket')
- ret.update({
- 'user_has_auth': user_settings and user_settings.has_auth,
- 'is_registration': self.owner.is_registration,
- })
+ user_settings = user.get_addon("bitbucket")
+ ret.update(
+ {
+ "user_has_auth": user_settings and user_settings.has_auth,
+ "is_registration": self.owner.is_registration,
+ }
+ )
if self.user_settings and self.user_settings.has_auth:
- connection = BitbucketClient(access_token=self.api.fetch_access_token())
+ connection = BitbucketClient(
+ access_token=self.api.fetch_access_token()
+ )
valid_credentials = True
try:
mine = connection.repos()
repo_names = [
- repo['full_name'].replace('/', ' / ')
- for repo in mine
+ repo["full_name"].replace("/", " / ") for repo in mine
]
except Exception:
repo_names = []
@@ -230,65 +246,71 @@ def to_json(self, user):
owner = self.user_settings.owner
if owner == user:
- ret.update({'repo_names': repo_names})
- ret.update({
- 'node_has_auth': True,
- 'bitbucket_user': self.user or '',
- 'bitbucket_repo': self.repo or '',
- 'bitbucket_repo_full_name': f'{self.user} / {self.repo}' if (self.user and self.repo) else '',
- 'auth_osf_name': owner.fullname,
- 'auth_osf_url': owner.url,
- 'auth_osf_id': owner._id,
- 'bitbucket_user_name': self.external_account.display_name,
- 'bitbucket_user_url': self.external_account.profile_url,
- 'is_owner': owner == user,
- 'valid_credentials': valid_credentials,
- 'addons_url': web_url_for('user_addons'),
- 'files_url': self.owner.web_url_for('collect_file_trees')
- })
+ ret.update({"repo_names": repo_names})
+ ret.update(
+ {
+ "node_has_auth": True,
+ "bitbucket_user": self.user or "",
+ "bitbucket_repo": self.repo or "",
+ "bitbucket_repo_full_name": f"{self.user} / {self.repo}"
+ if (self.user and self.repo)
+ else "",
+ "auth_osf_name": owner.fullname,
+ "auth_osf_url": owner.url,
+ "auth_osf_id": owner._id,
+ "bitbucket_user_name": self.external_account.display_name,
+ "bitbucket_user_url": self.external_account.profile_url,
+ "is_owner": owner == user,
+ "valid_credentials": valid_credentials,
+ "addons_url": web_url_for("user_addons"),
+ "files_url": self.owner.web_url_for("collect_file_trees"),
+ }
+ )
return ret
def serialize_waterbutler_credentials(self):
if not self.complete or not self.repo:
- raise exceptions.AddonError('Addon is not authorized')
- return {'token': self.api.fetch_access_token()}
+ raise exceptions.AddonError("Addon is not authorized")
+ return {"token": self.api.fetch_access_token()}
def serialize_waterbutler_settings(self):
if not self.complete:
- raise exceptions.AddonError('Repo is not configured')
+ raise exceptions.AddonError("Repo is not configured")
return {
- 'owner': self.user,
- 'repo': self.repo,
+ "owner": self.user,
+ "repo": self.repo,
}
def create_waterbutler_log(self, auth, action, metadata):
- path = metadata['path']
+ path = metadata["path"]
- url = self.owner.web_url_for('addon_view_or_download_file', path=path, provider='bitbucket')
+ url = self.owner.web_url_for(
+ "addon_view_or_download_file", path=path, provider="bitbucket"
+ )
sha, urls = None, {}
try:
- sha = metadata['extra']['commitSha']
+ sha = metadata["extra"]["commitSha"]
urls = {
- 'view': f'{url}?commitSha={sha}',
- 'download': f'{url}?action=download&commitSha={sha}'
+ "view": f"{url}?commitSha={sha}",
+ "download": f"{url}?action=download&commitSha={sha}",
}
except KeyError:
pass
self.owner.add_log(
- f'bitbucket_{action}',
+ f"bitbucket_{action}",
auth=auth,
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
- 'path': path,
- 'urls': urls,
- 'bitbucket': {
- 'user': self.user,
- 'repo': self.repo,
- 'commitSha': sha,
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
+ "path": path,
+ "urls": urls,
+ "bitbucket": {
+ "user": self.user,
+ "repo": self.repo,
+ "commitSha": sha,
},
},
)
@@ -320,12 +342,14 @@ def before_page_load(self, node, user):
repo_data = self.fetch_repo()
if repo_data:
- node_permissions = 'public' if node.is_public else 'private'
- repo_permissions = 'private' if repo_data['is_private'] else 'public'
+ node_permissions = "public" if node.is_public else "private"
+ repo_permissions = (
+ "private" if repo_data["is_private"] else "public"
+ )
if repo_permissions != node_permissions:
message = (
- 'Warning: This OSF {category} is {node_perm}, but the Bitbucket '
- 'repo {user} / {repo} is {repo_perm}.'.format(
+ "Warning: This OSF {category} is {node_perm}, but the Bitbucket "
+ "repo {user} / {repo} is {repo_perm}.".format(
category=markupsafe.escape(node.project_or_component),
node_perm=markupsafe.escape(node_permissions),
repo_perm=markupsafe.escape(repo_permissions),
@@ -333,14 +357,14 @@ def before_page_load(self, node, user):
repo=markupsafe.escape(self.repo),
)
)
- if repo_permissions == 'private':
+ if repo_permissions == "private":
message += (
- ' Users can view the contents of this private Bitbucket '
- 'repository through this public project.'
+ " Users can view the contents of this private Bitbucket "
+ "repository through this public project."
)
else:
message += (
- ' The files in this Bitbucket repo can be viewed on Bitbucket '
+ " The files in this Bitbucket repo can be viewed on Bitbucket "
'<u><a href="https://bitbucket.org/{user}/{repo}/">here</a></u>.'
).format(
user=self.user,
@@ -348,12 +372,10 @@ def before_page_load(self, node, user):
)
messages.append(message)
else:
- message = (
- 'Warning: the Bitbucket repo {user} / {repo} connected to this OSF {category} has been deleted.'.format(
- category=markupsafe.escape(node.project_or_component),
- user=markupsafe.escape(self.user),
- repo=markupsafe.escape(self.repo),
- )
+ message = "Warning: the Bitbucket repo {user} / {repo} connected to this OSF {category} has been deleted.".format(
+ category=markupsafe.escape(node.project_or_component),
+ user=markupsafe.escape(self.user),
+ repo=markupsafe.escape(self.repo),
)
messages.append(message)
@@ -368,11 +390,13 @@ def before_remove_contributor_message(self, node, removed):
"""
try:
- message = (super().before_remove_contributor_message(node, removed) +
- 'You can download the contents of this repository before removing '
- 'this contributor <u><a href="{url}">here</a></u>.'.format(
- url=node.api_url + 'bitbucket/tarball/'
- ))
+ message = (
+ super().before_remove_contributor_message(node, removed)
+ + "You can download the contents of this repository before removing "
+ 'this contributor <u><a href="{url}">here</a></u>.'.format(
+ url=node.api_url + "bitbucket/tarball/"
+ )
+ )
except TypeError:
# super call returned None due to lack of user auth
return None
@@ -389,21 +413,20 @@ def after_remove_contributor(self, node, removed, auth=None):
:return str: Alert message
"""
if self.user_settings and self.user_settings.owner == removed:
-
# Delete OAuth tokens
self.user_settings = None
self.save()
message = (
'Because the Bitbucket add-on for {category} "{title}" was authenticated '
- 'by {user}, authentication information has been deleted.'
+ "by {user}, authentication information has been deleted."
).format(
category=markupsafe.escape(node.category_display),
title=markupsafe.escape(node.title),
- user=markupsafe.escape(removed.fullname)
+ user=markupsafe.escape(removed.fullname),
)
if not auth or auth.user != removed:
- url = node.web_url_for('node_setting')
+ url = node.web_url_for("node_setting")
message += (
' You can re-authenticate on the <u><a href="{url}">Settings</a></u> page.'
).format(url=url)
@@ -421,9 +444,7 @@ def after_fork(self, node, fork, user, save=True):
:param bool save: Save settings after callback
:return tuple: Tuple of cloned settings and alert message
"""
- clone = super().after_fork(
- node, fork, user, save=False
- )
+ clone = super().after_fork(node, fork, user, save=False)
# Copy authentication if authenticated by forking user
if self.user_settings and self.user_settings.owner == user:
@@ -442,9 +463,9 @@ def before_make_public(self, node):
if is_private:
return (
- 'This {cat} is connected to a private Bitbucket repository. Users '
- '(other than contributors) will not be able to see the '
- 'contents of this repo unless it is made public on Bitbucket.'
+ "This {cat} is connected to a private Bitbucket repository. Users "
+ "(other than contributors) will not be able to see the "
+ "contents of this repo unless it is made public on Bitbucket."
).format(
cat=node.project_or_component,
)
diff --git a/addons/bitbucket/routes.py b/addons/bitbucket/routes.py
index 9deb07f2428..6d9d28be2d2 100644
--- a/addons/bitbucket/routes.py
+++ b/addons/bitbucket/routes.py
@@ -3,90 +3,82 @@
from addons.bitbucket import views
api_routes = {
- 'rules': [
-
+ "rules": [
Rule(
[
- '/settings/bitbucket/accounts/',
+ "/settings/bitbucket/accounts/",
],
- 'get',
+ "get",
views.bitbucket_account_list,
json_renderer,
),
-
Rule(
[
- '/project/<pid>/bitbucket/settings/',
- '/project/<pid>/node/<nid>/bitbucket/settings/'
+ "/project/<pid>/bitbucket/settings/",
+ "/project/<pid>/node/<nid>/bitbucket/settings/",
],
- 'get',
+ "get",
views.bitbucket_get_config,
json_renderer,
),
-
Rule(
[
- '/project/<pid>/bitbucket/settings/',
- '/project/<pid>/node/<nid>/bitbucket/settings/',
+ "/project/<pid>/bitbucket/settings/",
+ "/project/<pid>/node/<nid>/bitbucket/settings/",
],
- 'post',
+ "post",
views.bitbucket_set_config,
json_renderer,
),
-
Rule(
[
- '/project/<pid>/bitbucket/user_auth/',
- '/project/<pid>/node/<nid>/bitbucket/user_auth/'
+ "/project/<pid>/bitbucket/user_auth/",
+ "/project/<pid>/node/<nid>/bitbucket/user_auth/",
],
- 'put',
+ "put",
views.bitbucket_import_auth,
json_renderer,
),
-
Rule(
[
- '/project/<pid>/bitbucket/user_auth/',
- '/project/<pid>/node/<nid>/bitbucket/user_auth/'
+ "/project/<pid>/bitbucket/user_auth/",
+ "/project/<pid>/node/<nid>/bitbucket/user_auth/",
],
- 'delete',
+ "delete",
views.bitbucket_deauthorize_node,
json_renderer,
),
-
Rule(
[
- '/project/<pid>/bitbucket/tarball/',
- '/project/<pid>/node/<nid>/bitbucket/tarball/',
+ "/project/<pid>/bitbucket/tarball/",
+ "/project/<pid>/node/<nid>/bitbucket/tarball/",
],
- 'get',
+ "get",
views.bitbucket_download_starball,
json_renderer,
- {'archive': 'tar'},
- endpoint_suffix='__tar',
+ {"archive": "tar"},
+ endpoint_suffix="__tar",
),
Rule(
[
- '/project/<pid>/bitbucket/zipball/',
- '/project/<pid>/node/<nid>/bitbucket/zipball/',
+ "/project/<pid>/bitbucket/zipball/",
+ "/project/<pid>/node/<nid>/bitbucket/zipball/",
],
- 'get',
+ "get",
views.bitbucket_download_starball,
json_renderer,
- {'archive': 'zip'},
- endpoint_suffix='__zip',
+ {"archive": "zip"},
+ endpoint_suffix="__zip",
),
-
Rule(
[
- '/project/<pid>/bitbucket/hgrid/root/',
- '/project/<pid>/node/<nid>/bitbucket/hgrid/root/',
+ "/project/<pid>/bitbucket/hgrid/root/",
+ "/project/<pid>/node/<nid>/bitbucket/hgrid/root/",
],
- 'get',
+ "get",
views.bitbucket_root_folder,
json_renderer,
),
-
],
- 'prefix': '/api/v1'
+ "prefix": "/api/v1",
}
diff --git a/addons/bitbucket/serializer.py b/addons/bitbucket/serializer.py
index 65cc76d84b7..77c3c17a5e7 100644
--- a/addons/bitbucket/serializer.py
+++ b/addons/bitbucket/serializer.py
@@ -5,8 +5,7 @@
class BitbucketSerializer(StorageAddonSerializer):
-
- addon_short_name = 'bitbucket'
+ addon_short_name = "bitbucket"
def credentials_are_valid(self, user_settings, client):
try:
@@ -17,19 +16,19 @@ def credentials_are_valid(self, user_settings, client):
def serialized_folder(self, node_settings):
return {
- 'path': node_settings.repo,
- 'name': f'{node_settings.user} / {node_settings.repo}',
+ "path": node_settings.repo,
+ "name": f"{node_settings.user} / {node_settings.repo}",
}
@property
def addon_serialized_urls(self):
node = self.node_settings.owner
return {
- 'auth': api_url_for('oauth_connect', service_name='bitbucket'),
- 'importAuth': node.api_url_for('bitbucket_import_auth'),
- 'files': node.web_url_for('collect_file_trees'),
- 'folders': node.api_url_for('bitbucket_root_folder'),
- 'config': node.api_url_for('bitbucket_set_config'),
- 'deauthorize': node.api_url_for('bitbucket_deauthorize_node'),
- 'accounts': node.api_url_for('bitbucket_account_list'),
+ "auth": api_url_for("oauth_connect", service_name="bitbucket"),
+ "importAuth": node.api_url_for("bitbucket_import_auth"),
+ "files": node.web_url_for("collect_file_trees"),
+ "folders": node.api_url_for("bitbucket_root_folder"),
+ "config": node.api_url_for("bitbucket_set_config"),
+ "deauthorize": node.api_url_for("bitbucket_deauthorize_node"),
+ "accounts": node.api_url_for("bitbucket_account_list"),
}
diff --git a/addons/bitbucket/settings/__init__.py b/addons/bitbucket/settings/__init__.py
index 40f955c5a78..51b1f007605 100644
--- a/addons/bitbucket/settings/__init__.py
+++ b/addons/bitbucket/settings/__init__.py
@@ -7,4 +7,4 @@
try:
from .local import * # noqa
except ImportError:
- logger.warning('No local.py settings file found')
+ logger.warning("No local.py settings file found")
diff --git a/addons/bitbucket/settings/defaults.py b/addons/bitbucket/settings/defaults.py
index db8dd7d086a..49cfb1ccd24 100644
--- a/addons/bitbucket/settings/defaults.py
+++ b/addons/bitbucket/settings/defaults.py
@@ -3,23 +3,23 @@
CLIENT_SECRET = None
# Bitbucket access scope
-SCOPE = ['account', 'repository', 'team']
+SCOPE = ["account", "repository", "team"]
# Bitbucket hook domain
HOOK_DOMAIN = None
-HOOK_CONTENT_TYPE = 'json'
-HOOK_EVENTS = ['push'] # Only log commits
+HOOK_CONTENT_TYPE = "json"
+HOOK_EVENTS = ["push"] # Only log commits
# OAuth related urls
-OAUTH_AUTHORIZE_URL = 'https://bitbucket.org/site/oauth2/authorize'
-OAUTH_ACCESS_TOKEN_URL = 'https://bitbucket.org/site/oauth2/access_token'
+OAUTH_AUTHORIZE_URL = "https://bitbucket.org/site/oauth2/authorize"
+OAUTH_ACCESS_TOKEN_URL = "https://bitbucket.org/site/oauth2/access_token"
# Max render size in bytes; no max if None
MAX_RENDER_SIZE = None
CACHE = False
-BITBUCKET_V2_API_URL = 'https://api.bitbucket.org/2.0'
+BITBUCKET_V2_API_URL = "https://api.bitbucket.org/2.0"
REFRESH_TIME = 5 * 60
EXPIRY_TIME = 0
diff --git a/addons/bitbucket/settings/local-dist.py b/addons/bitbucket/settings/local-dist.py
index 55fae4d62fa..2bcfb17ea4b 100644
--- a/addons/bitbucket/settings/local-dist.py
+++ b/addons/bitbucket/settings/local-dist.py
@@ -1,3 +1,3 @@
# Bitbucket application credentials
-CLIENT_ID = '' # called 'Key'
-CLIENT_SECRET = '' # called 'Secret'
+CLIENT_ID = "" # called 'Key'
+CLIENT_SECRET = "" # called 'Secret'
diff --git a/addons/bitbucket/tests/factories.py b/addons/bitbucket/tests/factories.py
index 7220c54cbd7..f529960e88a 100644
--- a/addons/bitbucket/tests/factories.py
+++ b/addons/bitbucket/tests/factories.py
@@ -1,15 +1,19 @@
from factory import Sequence, SubFactory
from factory.django import DjangoModelFactory
-from osf_tests.factories import ExternalAccountFactory, ProjectFactory, UserFactory
+from osf_tests.factories import (
+ ExternalAccountFactory,
+ ProjectFactory,
+ UserFactory,
+)
from addons.bitbucket.models import NodeSettings, UserSettings
class BitbucketAccountFactory(ExternalAccountFactory):
- provider = 'bitbucket'
- provider_id = Sequence(lambda n: f'id-{n}')
- oauth_key = Sequence(lambda n: f'key-{n}')
- display_name = 'abc'
+ provider = "bitbucket"
+ provider_id = Sequence(lambda n: f"id-{n}")
+ oauth_key = Sequence(lambda n: f"key-{n}")
+ display_name = "abc"
class BitbucketUserSettingsFactory(DjangoModelFactory):
diff --git a/addons/bitbucket/tests/test_models.py b/addons/bitbucket/tests/test_models.py
index bfa759e23e5..3ff85992f24 100644
--- a/addons/bitbucket/tests/test_models.py
+++ b/addons/bitbucket/tests/test_models.py
@@ -4,7 +4,6 @@
from tests.base import OsfTestCase, get_default_metaschema
from osf_tests.factories import (
- ExternalAccountFactory,
ProjectFactory,
UserFactory,
DraftRegistrationFactory,
@@ -13,22 +12,22 @@
from framework.auth import Auth
from addons.bitbucket.exceptions import NotFoundError
-from addons.bitbucket import settings as bitbucket_settings
from addons.bitbucket.models import NodeSettings
from addons.bitbucket.tests.factories import (
BitbucketAccountFactory,
BitbucketNodeSettingsFactory,
- BitbucketUserSettingsFactory
+ BitbucketUserSettingsFactory,
)
from addons.base.tests import models
pytestmark = pytest.mark.django_db
-class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
-
- short_name = 'bitbucket'
- full_name = 'Bitbucket'
+class TestNodeSettings(
+ models.OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase
+):
+ short_name = "bitbucket"
+ full_name = "Bitbucket"
ExternalAccountFactory = BitbucketAccountFactory
NodeSettingsFactory = BitbucketNodeSettingsFactory
@@ -39,10 +38,10 @@ class TestNodeSettings(models.OAuthAddonNodeSettingsTestSuiteMixin, unittest.Tes
def _node_settings_class_kwargs(self, node, user_settings):
return {
- 'user_settings': self.user_settings,
- 'repo': 'mock',
- 'user': 'abc',
- 'owner': self.node
+ "user_settings": self.user_settings,
+ "repo": "mock",
+ "user": "abc",
+ "owner": self.node,
}
def test_set_folder(self):
@@ -54,63 +53,68 @@ def test_serialize_settings(self):
# Bitbucket's serialized_settings are a little different from
# common storage addons.
settings = self.node_settings.serialize_waterbutler_settings()
- expected = {'owner': self.node_settings.user, 'repo': self.node_settings.repo}
+ expected = {
+ "owner": self.node_settings.user,
+ "repo": self.node_settings.repo,
+ }
assert settings == expected
@mock.patch(
- 'addons.bitbucket.models.UserSettings.revoke_remote_oauth_access',
- mock.PropertyMock()
+ "addons.bitbucket.models.UserSettings.revoke_remote_oauth_access",
+ mock.PropertyMock(),
)
def test_complete_has_auth_not_verified(self):
super().test_complete_has_auth_not_verified()
- @mock.patch('addons.bitbucket.api.BitbucketClient.repos')
- @mock.patch('addons.bitbucket.api.BitbucketClient.team_repos')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repos")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.team_repos")
def test_to_json(self, mock_repos, mock_team_repos):
mock_repos.return_value = []
mock_team_repos.return_value = []
super().test_to_json()
- @mock.patch('addons.bitbucket.api.BitbucketClient.repos')
- @mock.patch('addons.bitbucket.api.BitbucketClient.team_repos')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repos")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.team_repos")
def test_to_json_user_is_owner(self, mock_repos, mock_team_repos):
mock_repos.return_value = []
mock_team_repos.return_value = []
result = self.node_settings.to_json(self.user)
- assert result['user_has_auth']
- assert result['bitbucket_user'] == 'abc'
- assert result['is_owner']
- assert result['valid_credentials']
- assert result.get('repo_names', None) == []
-
- @mock.patch('addons.bitbucket.api.BitbucketClient.repos')
- @mock.patch('addons.bitbucket.api.BitbucketClient.team_repos')
+ assert result["user_has_auth"]
+ assert result["bitbucket_user"] == "abc"
+ assert result["is_owner"]
+ assert result["valid_credentials"]
+ assert result.get("repo_names", None) == []
+
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repos")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.team_repos")
def test_to_json_user_is_not_owner(self, mock_repos, mock_team_repos):
mock_repos.return_value = []
mock_team_repos.return_value = []
not_owner = UserFactory()
result = self.node_settings.to_json(not_owner)
- assert not result['user_has_auth']
- assert result['bitbucket_user'] == 'abc'
- assert not result['is_owner']
- assert result['valid_credentials']
- assert result.get('repo_names', None) == None
-
-
-class TestUserSettings(models.OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
-
- short_name = 'bitbucket'
- full_name = 'Bitbucket'
+ assert not result["user_has_auth"]
+ assert result["bitbucket_user"] == "abc"
+ assert not result["is_owner"]
+ assert result["valid_credentials"]
+ assert result.get("repo_names", None) == None
+
+
+class TestUserSettings(
+ models.OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase
+):
+ short_name = "bitbucket"
+ full_name = "Bitbucket"
ExternalAccountFactory = BitbucketAccountFactory
def test_public_id(self):
- assert self.user.external_accounts.first().display_name == self.user_settings.public_id
+ assert (
+ self.user.external_accounts.first().display_name
+ == self.user_settings.public_id
+ )
class TestCallbacks(OsfTestCase):
-
def setUp(self):
-
super().setUp()
self.project = ProjectFactory()
@@ -121,81 +125,99 @@ def setUp(self):
auth=self.consolidated_auth,
)
- self.project.add_addon('bitbucket', auth=self.consolidated_auth)
- self.project.creator.add_addon('bitbucket')
+ self.project.add_addon("bitbucket", auth=self.consolidated_auth)
+ self.project.creator.add_addon("bitbucket")
self.external_account = BitbucketAccountFactory()
self.project.creator.external_accounts.add(self.external_account)
- self.node_settings = self.project.get_addon('bitbucket')
- self.user_settings = self.project.creator.get_addon('bitbucket')
+ self.node_settings = self.project.get_addon("bitbucket")
+ self.user_settings = self.project.creator.get_addon("bitbucket")
self.node_settings.user_settings = self.user_settings
- self.node_settings.user = 'Queen'
- self.node_settings.repo = 'Sheer-Heart-Attack'
+ self.node_settings.user = "Queen"
+ self.node_settings.repo = "Sheer-Heart-Attack"
self.node_settings.external_account = self.external_account
self.node_settings.save()
self.node_settings.set_auth
- self.user_settings.oauth_grants[self.project._id] = {self.external_account._id: []}
+ self.user_settings.oauth_grants[self.project._id] = {
+ self.external_account._id: []
+ }
self.user_settings.save()
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
def test_before_make_public(self, mock_repo):
mock_repo.side_effect = NotFoundError
result = self.node_settings.before_make_public(self.project)
assert result is None
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
def test_before_page_load_osf_public_bb_public(self, mock_repo):
self.project.is_public = True
self.project.save()
- mock_repo.return_value = {'is_private': False}
- message = self.node_settings.before_page_load(self.project, self.project.creator)
+ mock_repo.return_value = {"is_private": False}
+ message = self.node_settings.before_page_load(
+ self.project, self.project.creator
+ )
mock_repo.assert_called_with(
user=self.node_settings.user,
repo=self.node_settings.repo,
)
assert not message
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
def test_before_page_load_osf_public_bb_private(self, mock_repo):
self.project.is_public = True
self.project.save()
- mock_repo.return_value = {'is_private': True}
- message = self.node_settings.before_page_load(self.project, self.project.creator)
+ mock_repo.return_value = {"is_private": True}
+ message = self.node_settings.before_page_load(
+ self.project, self.project.creator
+ )
mock_repo.assert_called_with(
user=self.node_settings.user,
repo=self.node_settings.repo,
)
assert message
- assert 'Users can view the contents of this private Bitbucket repository through this public project.' in message[0]
+ assert (
+ "Users can view the contents of this private Bitbucket repository through this public project."
+ in message[0]
+ )
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
def test_before_page_load_repo_deleted(self, mock_repo):
self.project.is_public = True
self.project.save()
mock_repo.return_value = None
- message = self.node_settings.before_page_load(self.project, self.project.creator)
+ message = self.node_settings.before_page_load(
+ self.project, self.project.creator
+ )
mock_repo.assert_called_with(
user=self.node_settings.user,
repo=self.node_settings.repo,
)
assert message
- assert 'has been deleted.' in message[0]
+ assert "has been deleted." in message[0]
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
def test_before_page_load_osf_private_bb_public(self, mock_repo):
- mock_repo.return_value = {'is_private': False}
- message = self.node_settings.before_page_load(self.project, self.project.creator)
+ mock_repo.return_value = {"is_private": False}
+ message = self.node_settings.before_page_load(
+ self.project, self.project.creator
+ )
mock_repo.assert_called_with(
user=self.node_settings.user,
repo=self.node_settings.repo,
)
assert message
- assert 'The files in this Bitbucket repo can be viewed on Bitbucket' in message[0]
+ assert (
+ "The files in this Bitbucket repo can be viewed on Bitbucket"
+ in message[0]
+ )
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
def test_before_page_load_osf_private_bb_private(self, mock_repo):
- mock_repo.return_value = {'is_private': True}
- message = self.node_settings.before_page_load(self.project, self.project.creator)
+ mock_repo.return_value = {"is_private": True}
+ message = self.node_settings.before_page_load(
+ self.project, self.project.creator
+ )
mock_repo.assert_called_with(
user=self.node_settings.user,
repo=self.node_settings.repo,
@@ -203,7 +225,9 @@ def test_before_page_load_osf_private_bb_private(self, mock_repo):
assert not message
def test_before_page_load_not_contributor(self):
- message = self.node_settings.before_page_load(self.project, UserFactory())
+ message = self.node_settings.before_page_load(
+ self.project, UserFactory()
+ )
assert not message
def test_before_page_load_not_logged_in(self):
@@ -228,7 +252,7 @@ def test_after_remove_contributor_authenticator_self(self):
)
assert self.node_settings.user_settings is None
assert message
- assert 'You can re-authenticate' not in message
+ assert "You can re-authenticate" not in message
def test_after_remove_contributor_authenticator_not_self(self):
auth = Auth(user=self.non_authenticator)
@@ -237,7 +261,7 @@ def test_after_remove_contributor_authenticator_not_self(self):
)
assert self.node_settings.user_settings is None
assert message
- assert 'You can re-authenticate' in message
+ assert "You can re-authenticate" in message
def test_after_remove_contributor_not_authenticator(self):
self.node_settings.after_remove_contributor(
@@ -248,14 +272,18 @@ def test_after_remove_contributor_not_authenticator(self):
def test_after_fork_authenticator(self):
fork = ProjectFactory()
clone = self.node_settings.after_fork(
- self.project, fork, self.project.creator,
+ self.project,
+ fork,
+ self.project.creator,
)
assert self.node_settings.user_settings == clone.user_settings
def test_after_fork_not_authenticator(self):
fork = ProjectFactory()
clone = self.node_settings.after_fork(
- self.project, fork, self.non_authenticator,
+ self.project,
+ fork,
+ self.non_authenticator,
)
assert clone.user_settings is None
@@ -265,11 +293,13 @@ def test_after_delete(self):
self.node_settings.reload()
assert self.node_settings.user_settings is None
- @mock.patch('website.archiver.tasks.archive')
+ @mock.patch("website.archiver.tasks.archive")
def test_does_not_get_copied_to_registrations(self, mock_archive):
registration = self.project.register_node(
schema=get_default_metaschema(),
auth=Auth(user=self.project.creator),
- draft_registration=DraftRegistrationFactory(branched_from=self.project),
+ draft_registration=DraftRegistrationFactory(
+ branched_from=self.project
+ ),
)
- assert not registration.has_addon('bitbucket')
+ assert not registration.has_addon("bitbucket")
diff --git a/addons/bitbucket/tests/test_serializer.py b/addons/bitbucket/tests/test_serializer.py
index 008625ab909..f2e400a0db9 100644
--- a/addons/bitbucket/tests/test_serializer.py
+++ b/addons/bitbucket/tests/test_serializer.py
@@ -1,6 +1,5 @@
"""Serializer tests for the Bitbucket addon."""
-from unittest import mock
import pytest
from tests.base import OsfTestCase
@@ -11,9 +10,11 @@
pytestmark = pytest.mark.django_db
-class TestBitbucketSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
- addon_short_name = 'bitbucket'
+class TestBitbucketSerializer(
+ StorageAddonSerializerTestSuiteMixin, OsfTestCase
+):
+ addon_short_name = "bitbucket"
Serializer = BitbucketSerializer
ExternalAccountFactory = BitbucketAccountFactory
diff --git a/addons/bitbucket/tests/test_views.py b/addons/bitbucket/tests/test_views.py
index 51cac6012b5..e55f30ec93b 100644
--- a/addons/bitbucket/tests/test_views.py
+++ b/addons/bitbucket/tests/test_views.py
@@ -1,7 +1,6 @@
from rest_framework import status as http_status
from unittest import mock
-import datetime
import unittest
import pytest
@@ -16,30 +15,36 @@
from framework.exceptions import HTTPError
from framework.auth import Auth
-from website.util import api_url_for
from addons.base.tests.views import (
- OAuthAddonAuthViewsTestCaseMixin, OAuthAddonConfigViewsTestCaseMixin
+ OAuthAddonAuthViewsTestCaseMixin,
+ OAuthAddonConfigViewsTestCaseMixin,
)
from addons.bitbucket import utils
from addons.bitbucket.api import BitbucketClient
-from addons.bitbucket.models import BitbucketProvider
from addons.bitbucket.serializer import BitbucketSerializer
from addons.bitbucket.tests.factories import BitbucketAccountFactory
-from addons.bitbucket.tests.utils import BitbucketAddonTestCase, create_mock_bitbucket
+from addons.bitbucket.tests.utils import (
+ BitbucketAddonTestCase,
+ create_mock_bitbucket,
+)
pytestmark = pytest.mark.django_db
-class TestBitbucketAuthViews(BitbucketAddonTestCase, OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
+class TestBitbucketAuthViews(
+ BitbucketAddonTestCase, OAuthAddonAuthViewsTestCaseMixin, OsfTestCase
+):
@mock.patch(
- 'addons.bitbucket.models.UserSettings.revoke_remote_oauth_access',
- mock.PropertyMock()
+ "addons.bitbucket.models.UserSettings.revoke_remote_oauth_access",
+ mock.PropertyMock(),
)
def test_delete_external_account(self):
super().test_delete_external_account()
-class TestBitbucketConfigViews(BitbucketAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
+class TestBitbucketConfigViews(
+ BitbucketAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase
+):
folder = None
Serializer = BitbucketSerializer
client = BitbucketClient
@@ -48,7 +53,9 @@ class TestBitbucketConfigViews(BitbucketAddonTestCase, OAuthAddonConfigViewsTest
def setUp(self):
super().setUp()
- self.mock_access_token = mock.patch('addons.bitbucket.models.BitbucketProvider.fetch_access_token')
+ self.mock_access_token = mock.patch(
+ "addons.bitbucket.models.BitbucketProvider.fetch_access_token"
+ )
self.mock_access_token.return_value = mock.Mock()
self.mock_access_token.start()
@@ -60,24 +67,30 @@ def test_folder_list(self):
# BB only lists root folder (repos), this test is superfluous
pass
- @mock.patch('addons.bitbucket.views.BitbucketClient.repo')
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
+ @mock.patch("addons.bitbucket.views.BitbucketClient.repo")
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
def test_set_config(self, mock_account, mock_repo):
# BB selects repos, not folders, so this needs to be overriden
mock_account.return_value = mock.Mock()
- mock_repo.return_value = 'repo_name'
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_set_config')
- res = self.app.post(url, json={
- 'bitbucket_user': 'octocat',
- 'bitbucket_repo': 'repo_name',
- }, auth=self.user.auth)
+ mock_repo.return_value = "repo_name"
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_set_config")
+ res = self.app.post(
+ url,
+ json={
+ "bitbucket_user": "octocat",
+ "bitbucket_repo": "repo_name",
+ },
+ auth=self.user.auth,
+ )
assert res.status_code == http_status.HTTP_200_OK
self.project.reload()
- assert self.project.logs.latest().action == f'{self.ADDON_SHORT_NAME}_repo_linked'
+ assert (
+ self.project.logs.latest().action
+ == f"{self.ADDON_SHORT_NAME}_repo_linked"
+ )
class TestBitbucketViews(OsfTestCase):
-
def setUp(self):
super().setUp()
self.user = AuthUserFactory()
@@ -93,22 +106,26 @@ def setUp(self):
self.external_account = BitbucketAccountFactory()
- self.project.add_addon('bitbucket', auth=self.consolidated_auth)
- self.project.creator.add_addon('bitbucket')
+ self.project.add_addon("bitbucket", auth=self.consolidated_auth)
+ self.project.creator.add_addon("bitbucket")
self.project.creator.external_accounts.add(self.external_account)
self.project.creator.save()
- self.bitbucket = create_mock_bitbucket(user='fred', private=False)
+ self.bitbucket = create_mock_bitbucket(user="fred", private=False)
- self.user_settings = self.project.creator.get_addon('bitbucket')
- self.user_settings.oauth_grants[self.project._id] = {self.external_account._id: []}
+ self.user_settings = self.project.creator.get_addon("bitbucket")
+ self.user_settings.oauth_grants[self.project._id] = {
+ self.external_account._id: []
+ }
self.user_settings.save()
- self.node_settings = self.project.get_addon('bitbucket')
+ self.node_settings = self.project.get_addon("bitbucket")
self.node_settings.user_settings = self.user_settings
self.node_settings.external_account = self.external_account
- self.node_settings.user = self.bitbucket.repo.return_value['owner']['username']
- self.node_settings.repo = self.bitbucket.repo.return_value['name']
+ self.node_settings.user = self.bitbucket.repo.return_value["owner"][
+ "username"
+ ]
+ self.node_settings.repo = self.bitbucket.repo.return_value["name"]
self.node_settings.save()
def _get_sha_for_branch(self, branch=None, mock_branches=None):
@@ -118,126 +135,137 @@ def _get_sha_for_branch(self, branch=None, mock_branches=None):
if branch is None: # Get default branch name
branch = self.bitbucket.repo_default_branch.return_value
for each in mock_branches.return_value:
- if each['name'] == branch:
- branch_sha = each['target']['hash']
+ if each["name"] == branch:
+ branch_sha = each["target"]["hash"]
return branch_sha
# Tests for _get_refs
- @mock.patch('addons.bitbucket.api.BitbucketClient.branches')
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo_default_branch')
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
- def test_get_refs_defaults(self, mock_account, mock_default_branch, mock_repo, mock_branches):
+ @mock.patch("addons.bitbucket.api.BitbucketClient.branches")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo_default_branch")
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
+ def test_get_refs_defaults(
+ self, mock_account, mock_default_branch, mock_repo, mock_branches
+ ):
bitbucket_mock = self.bitbucket
mock_account.return_value = mock.Mock()
- mock_default_branch.return_value = bitbucket_mock.repo_default_branch.return_value
+ mock_default_branch.return_value = (
+ bitbucket_mock.repo_default_branch.return_value
+ )
mock_repo.return_value = bitbucket_mock.repo.return_value
mock_branches.return_value = bitbucket_mock.branches.return_value
branch, sha, branches = utils.get_refs(self.node_settings)
assert branch == bitbucket_mock.repo_default_branch.return_value
- assert sha == self._get_sha_for_branch(branch=None) # Get refs for default branch
+ assert sha == self._get_sha_for_branch(
+ branch=None
+ ) # Get refs for default branch
expected_branches = [
- {'name': x['name'], 'sha': x['target']['hash']}
+ {"name": x["name"], "sha": x["target"]["hash"]}
for x in bitbucket_mock.branches.return_value
]
assert branches == expected_branches
- @mock.patch('addons.bitbucket.api.BitbucketClient.branches')
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo_default_branch')
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
- def test_get_refs_branch(self, mock_account, mock_default_branch, mock_repo, mock_branches):
+ @mock.patch("addons.bitbucket.api.BitbucketClient.branches")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo_default_branch")
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
+ def test_get_refs_branch(
+ self, mock_account, mock_default_branch, mock_repo, mock_branches
+ ):
bitbucket_mock = self.bitbucket
mock_account.return_value = mock.Mock()
- mock_default_branch.return_value = bitbucket_mock.repo_default_branch.return_value
+ mock_default_branch.return_value = (
+ bitbucket_mock.repo_default_branch.return_value
+ )
mock_repo.return_value = bitbucket_mock.repo.return_value
mock_branches.return_value = bitbucket_mock.branches.return_value
- branch, sha, branches = utils.get_refs(self.node_settings, 'master')
- assert branch == 'master'
- branch_sha = self._get_sha_for_branch('master')
+ branch, sha, branches = utils.get_refs(self.node_settings, "master")
+ assert branch == "master"
+ branch_sha = self._get_sha_for_branch("master")
assert sha == branch_sha
expected_branches = [
- {'name': x['name'], 'sha': x['target']['hash']}
+ {"name": x["name"], "sha": x["target"]["hash"]}
for x in bitbucket_mock.branches.return_value
]
assert branches == expected_branches
def test_before_fork(self):
- url = self.project.api_url + 'fork/before/'
+ url = self.project.api_url + "fork/before/"
res = self.app.get(url, auth=self.user.auth, follow_redirects=True)
- assert len(res.json['prompts']) == 1
+ assert len(res.json["prompts"]) == 1
def test_before_register(self):
- url = self.project.api_url + 'beforeregister/'
+ url = self.project.api_url + "beforeregister/"
res = self.app.get(url, auth=self.user.auth, follow_redirects=True)
- assert 'Bitbucket' in res.json['prompts'][1]
+ assert "Bitbucket" in res.json["prompts"][1]
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
def test_get_refs_sha_no_branch(self, mock_account):
with pytest.raises(HTTPError):
- utils.get_refs(self.node_settings, sha='12345')
+ utils.get_refs(self.node_settings, sha="12345")
def check_hook_urls(self, urls, node, path, sha):
- url = node.web_url_for('addon_view_or_download_file', path=path, provider='bitbucket')
+ url = node.web_url_for(
+ "addon_view_or_download_file", path=path, provider="bitbucket"
+ )
expected_urls = {
- 'view': f'{url}?ref={sha}',
- 'download': f'{url}?action=download&ref={sha}'
+ "view": f"{url}?ref={sha}",
+ "download": f"{url}?action=download&ref={sha}",
}
- assert urls['view'] == expected_urls['view']
- assert urls['download'] == expected_urls['download']
+ assert urls["view"] == expected_urls["view"]
+ assert urls["download"] == expected_urls["download"]
class TestBitbucketSettings(OsfTestCase):
-
def setUp(self):
-
super().setUp()
- self.bitbucket = create_mock_bitbucket(user='fred', private=False)
+ self.bitbucket = create_mock_bitbucket(user="fred", private=False)
self.project = ProjectFactory()
self.project.save()
self.auth = self.project.creator.auth
self.consolidated_auth = Auth(user=self.project.creator)
- self.project.add_addon('bitbucket', auth=self.consolidated_auth)
- self.project.creator.add_addon('bitbucket')
- self.node_settings = self.project.get_addon('bitbucket')
- self.user_settings = self.project.creator.get_addon('bitbucket')
+ self.project.add_addon("bitbucket", auth=self.consolidated_auth)
+ self.project.creator.add_addon("bitbucket")
+ self.node_settings = self.project.get_addon("bitbucket")
+ self.user_settings = self.project.creator.get_addon("bitbucket")
self.node_settings.user_settings = self.user_settings
- self.node_settings.user = 'Queen'
- self.node_settings.repo = 'Sheer-Heart-Attack'
+ self.node_settings.user = "Queen"
+ self.node_settings.repo = "Sheer-Heart-Attack"
self.node_settings.save()
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
def test_link_repo(self, mock_account, mock_repo):
bitbucket_mock = self.bitbucket
mock_account.return_value = mock.Mock()
mock_repo.return_value = bitbucket_mock.repo.return_value
- url = self.project.api_url + 'bitbucket/settings/'
+ url = self.project.api_url + "bitbucket/settings/"
self.app.post(
url,
json={
- 'bitbucket_user': 'queen',
- 'bitbucket_repo': 'night at the opera',
+ "bitbucket_user": "queen",
+ "bitbucket_repo": "night at the opera",
},
- auth=self.auth, follow_redirects=True
+ auth=self.auth,
+ follow_redirects=True,
)
self.project.reload()
self.node_settings.reload()
- assert self.node_settings.user == 'queen'
- assert self.node_settings.repo == 'night at the opera'
- assert self.project.logs.latest().action == 'bitbucket_repo_linked'
+ assert self.node_settings.user == "queen"
+ assert self.node_settings.repo == "night at the opera"
+ assert self.project.logs.latest().action == "bitbucket_repo_linked"
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
def test_link_repo_no_change(self, mock_account, mock_repo):
bitbucket_mock = self.bitbucket
mock_account.return_value = mock.Mock()
@@ -245,15 +273,15 @@ def test_link_repo_no_change(self, mock_account, mock_repo):
log_count = self.project.logs.count()
- url = self.project.api_url + 'bitbucket/settings/'
+ url = self.project.api_url + "bitbucket/settings/"
self.app.post(
url,
json={
- 'bitbucket_user': 'Queen',
- 'bitbucket_repo': 'Sheer-Heart-Attack',
+ "bitbucket_user": "Queen",
+ "bitbucket_repo": "Sheer-Heart-Attack",
},
auth=self.auth,
- follow_redirects=True
+ follow_redirects=True,
)
self.project.reload()
@@ -261,26 +289,26 @@ def test_link_repo_no_change(self, mock_account, mock_repo):
assert self.project.logs.count() == log_count
- @mock.patch('addons.bitbucket.api.BitbucketClient.repo')
- @mock.patch('addons.bitbucket.models.NodeSettings.external_account')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.repo")
+ @mock.patch("addons.bitbucket.models.NodeSettings.external_account")
def test_link_repo_non_existent(self, mock_account, mock_repo):
mock_account.return_value = mock.Mock()
mock_repo.return_value = None
- url = self.project.api_url + 'bitbucket/settings/'
+ url = self.project.api_url + "bitbucket/settings/"
res = self.app.post(
url,
json={
- 'bitbucket_user': 'queen',
- 'bitbucket_repo': 'night at the opera',
+ "bitbucket_user": "queen",
+ "bitbucket_repo": "night at the opera",
},
auth=self.auth,
- follow_redirects=True
+ follow_redirects=True,
)
assert res.status_code == 400
- @mock.patch('addons.bitbucket.api.BitbucketClient.branches')
+ @mock.patch("addons.bitbucket.api.BitbucketClient.branches")
def test_link_repo_registration(self, mock_branches):
bitbucket_mock = self.bitbucket
mock_branches.return_value = bitbucket_mock.branches.return_value
@@ -288,25 +316,26 @@ def test_link_repo_registration(self, mock_branches):
registration = self.project.register_node(
schema=get_default_metaschema(),
auth=self.consolidated_auth,
- draft_registration=DraftRegistrationFactory(branched_from=self.project)
+ draft_registration=DraftRegistrationFactory(
+ branched_from=self.project
+ ),
)
- url = registration.api_url + 'bitbucket/settings/'
+ url = registration.api_url + "bitbucket/settings/"
res = self.app.post(
url,
json={
- 'bitbucket_user': 'queen',
- 'bitbucket_repo': 'night at the opera',
+ "bitbucket_user": "queen",
+ "bitbucket_repo": "night at the opera",
},
auth=self.auth,
- follow_redirects=True
+ follow_redirects=True,
)
assert res.status_code == 400
def test_deauthorize(self):
-
- url = self.project.api_url + 'bitbucket/user_auth/'
+ url = self.project.api_url + "bitbucket/user_auth/"
self.app.delete(url, auth=self.auth, follow_redirects=True)
@@ -316,8 +345,10 @@ def test_deauthorize(self):
assert self.node_settings.repo == None
assert self.node_settings.user_settings == None
- assert self.project.logs.latest().action == 'bitbucket_node_deauthorized'
+ assert (
+ self.project.logs.latest().action == "bitbucket_node_deauthorized"
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/addons/bitbucket/tests/utils.py b/addons/bitbucket/tests/utils.py
index bfedc8d0ac6..1e8433bfdbb 100644
--- a/addons/bitbucket/tests/utils.py
+++ b/addons/bitbucket/tests/utils.py
@@ -8,16 +8,17 @@
class BitbucketAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
- ADDON_SHORT_NAME = 'bitbucket'
+ ADDON_SHORT_NAME = "bitbucket"
ExternalAccountFactory = BitbucketAccountFactory
Provider = BitbucketProvider
def set_node_settings(self, settings):
super().set_node_settings(settings)
- settings.repo = 'abc'
- settings.user = 'octo-cat'
+ settings.repo = "abc"
+ settings.user = "octo-cat"
-def create_mock_bitbucket(user='octo-cat', private=False):
+
+def create_mock_bitbucket(user="octo-cat", private=False):
"""Factory for mock BitbucketClients objects.
Example: ::
"""
@@ -25,31 +26,31 @@ def create_mock_bitbucket(user='octo-cat', private=False):
bitbucket_mock = mock.create_autospec(BitbucketClient)
bitbucket_mock.username.return_value = user
bitbucket_mock.user.return_value = { # TODO: needs filling out
- 'username': user,
- 'uuid': '1234-3324',
- 'links': {'html': {'ref': 'https://nope.example.org/profile.html'}},
+ "username": user,
+ "uuid": "1234-3324",
+ "links": {"html": {"ref": "https://nope.example.org/profile.html"}},
}
bitbucket_mock.repo.return_value = {
- 'name': 'cow-problems-app',
- 'is_private': private,
- 'owner': {'username': user},
+ "name": "cow-problems-app",
+ "is_private": private,
+ "owner": {"username": user},
}
bitbucket_mock.repos.return_value = [
- {'full_name': f'{user}/cow-problems-app'},
- {'full_name': f'{user}/duck-problems-app'},
- {'full_name': f'{user}/horse-problems-app'},
+ {"full_name": f"{user}/cow-problems-app"},
+ {"full_name": f"{user}/duck-problems-app"},
+ {"full_name": f"{user}/horse-problems-app"},
]
bitbucket_mock.team_repos.return_value = [
- {'full_name': 'team-barn-devs/pig-problems-app'},
- {'full_name': 'team-barn-devs/goat-problems-app'},
- {'full_name': 'team-barn-devs/goose-problems-app'},
+ {"full_name": "team-barn-devs/pig-problems-app"},
+ {"full_name": "team-barn-devs/goat-problems-app"},
+ {"full_name": "team-barn-devs/goose-problems-app"},
]
- bitbucket_mock.repo_default_branch.return_value = 'master'
+ bitbucket_mock.repo_default_branch.return_value = "master"
bitbucket_mock.branches.return_value = [
- {'name': 'master', 'target': {'hash': 'a1b2c3d4'}},
- {'name': 'develop', 'target': {'hash': '0f9e8d7c'}},
+ {"name": "master", "target": {"hash": "a1b2c3d4"}},
+ {"name": "develop", "target": {"hash": "0f9e8d7c"}},
]
return bitbucket_mock
diff --git a/addons/bitbucket/utils.py b/addons/bitbucket/utils.py
index d289e056669..6d7c9e23aab 100644
--- a/addons/bitbucket/utils.py
+++ b/addons/bitbucket/utils.py
@@ -7,7 +7,7 @@
def get_path(kwargs, required=True):
- path = kwargs.get('path')
+ path = kwargs.get("path")
if path:
return unquote_plus(path)
elif required:
@@ -23,7 +23,9 @@ def get_refs(addon, branch=None, sha=None, connection=None):
:param Bitbucket connection: Bitbucket API object. If None, one will be created
from the addon's user settings.
"""
- connection = connection or BitbucketClient(access_token=addon.external_account.oauth_key)
+ connection = connection or BitbucketClient(
+ access_token=addon.external_account.oauth_key
+ )
if sha and not branch:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
@@ -39,11 +41,12 @@ def get_refs(addon, branch=None, sha=None, connection=None):
# identify commit sha for requested branch
for each in branches:
- if branch == each['name']:
- sha = each['target']['hash']
+ if branch == each["name"]:
+ sha = each["target"]["hash"]
break
- return branch, sha, [
- {'name': x['name'], 'sha': x['target']['hash']}
- for x in branches
- ]
+ return (
+ branch,
+ sha,
+ [{"name": x["name"], "sha": x["target"]["hash"]} for x in branches],
+ )
diff --git a/addons/bitbucket/views.py b/addons/bitbucket/views.py
index 0409595c9e1..ac252f610cd 100644
--- a/addons/bitbucket/views.py
+++ b/addons/bitbucket/views.py
@@ -1,4 +1,5 @@
"""Views for the node settings page."""
+
from rest_framework import status as http_status
import logging
@@ -12,64 +13,62 @@
from addons.bitbucket.serializer import BitbucketSerializer
from website.project.decorators import (
- must_have_addon, must_be_addon_authorizer,
- must_have_permission, must_not_be_registration,
- must_be_contributor_or_public
+ must_have_addon,
+ must_be_addon_authorizer,
+ must_have_permission,
+ must_not_be_registration,
+ must_be_contributor_or_public,
)
from osf.utils.permissions import WRITE
logger = logging.getLogger(__name__)
-SHORT_NAME = 'bitbucket'
-FULL_NAME = 'Bitbucket'
+SHORT_NAME = "bitbucket"
+FULL_NAME = "Bitbucket"
############
# Generics #
############
bitbucket_account_list = generic_views.account_list(
- SHORT_NAME,
- BitbucketSerializer
+ SHORT_NAME, BitbucketSerializer
)
bitbucket_import_auth = generic_views.import_auth(
- SHORT_NAME,
- BitbucketSerializer
+ SHORT_NAME, BitbucketSerializer
)
+
def _get_folders(node_addon, folder_id):
pass
+
bitbucket_folder_list = generic_views.folder_list(
- SHORT_NAME,
- FULL_NAME,
- _get_folders
+ SHORT_NAME, FULL_NAME, _get_folders
)
bitbucket_get_config = generic_views.get_config(
- SHORT_NAME,
- BitbucketSerializer
+ SHORT_NAME, BitbucketSerializer
)
-bitbucket_deauthorize_node = generic_views.deauthorize_node(
- SHORT_NAME
-)
+bitbucket_deauthorize_node = generic_views.deauthorize_node(SHORT_NAME)
#################
# Special Cased #
#################
+
@must_not_be_registration
-@must_have_addon(SHORT_NAME, 'user')
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "user")
+@must_have_addon(SHORT_NAME, "node")
@must_be_addon_authorizer(SHORT_NAME)
@must_have_permission(WRITE)
def bitbucket_set_config(auth, **kwargs):
- node_settings = kwargs.get('node_addon', None)
- node = kwargs.get('node', None)
- user_settings = kwargs.get('user_addon', None)
+ node_settings = kwargs.get("node_addon", None)
+ node = kwargs.get("node", None)
+ user_settings = kwargs.get("user_addon", None)
try:
if not node:
@@ -80,35 +79,34 @@ def bitbucket_set_config(auth, **kwargs):
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
# Parse request
- bitbucket_user_name = request.json.get('bitbucket_user', '')
- bitbucket_repo_name = request.json.get('bitbucket_repo', '')
+ bitbucket_user_name = request.json.get("bitbucket_user", "")
+ bitbucket_repo_name = request.json.get("bitbucket_repo", "")
if not bitbucket_user_name or not bitbucket_repo_name:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
# Verify that repo exists and that user can access
- connection = BitbucketClient(access_token=node_settings.external_account.oauth_key)
+ connection = BitbucketClient(
+ access_token=node_settings.external_account.oauth_key
+ )
repo = connection.repo(bitbucket_user_name, bitbucket_repo_name)
if repo is None:
if user_settings:
message = (
- 'Cannot access repo. Either the repo does not exist '
- 'or your account does not have permission to view it.'
+ "Cannot access repo. Either the repo does not exist "
+ "or your account does not have permission to view it."
)
else:
- message = (
- 'Cannot access repo.'
- )
- return {'message': message}, http_status.HTTP_400_BAD_REQUEST
+ message = "Cannot access repo."
+ return {"message": message}, http_status.HTTP_400_BAD_REQUEST
changed = (
- bitbucket_user_name != node_settings.user or
- bitbucket_repo_name != node_settings.repo
+ bitbucket_user_name != node_settings.user
+ or bitbucket_repo_name != node_settings.repo
)
# Update hooks
if changed:
-
# # Delete existing hook, if any
# node_settings.delete_hook()
@@ -118,14 +116,14 @@ def bitbucket_set_config(auth, **kwargs):
# Log repo select
node.add_log(
- action='bitbucket_repo_linked',
+ action="bitbucket_repo_linked",
params={
- 'project': node.parent_id,
- 'node': node._id,
- 'bitbucket': {
- 'user': bitbucket_user_name,
- 'repo': bitbucket_repo_name,
- }
+ "project": node.parent_id,
+ "node": node._id,
+ "bitbucket": {
+ "user": bitbucket_user_name,
+ "repo": bitbucket_repo_name,
+ },
},
auth=auth,
)
@@ -138,14 +136,16 @@ def bitbucket_set_config(auth, **kwargs):
return {}
+
@must_be_contributor_or_public
-@must_have_addon('bitbucket', 'node')
+@must_have_addon("bitbucket", "node")
def bitbucket_download_starball(node_addon, **kwargs):
+ archive = kwargs.get("archive", "tar")
+ ref = request.args.get("sha", "master")
- archive = kwargs.get('archive', 'tar')
- ref = request.args.get('sha', 'master')
-
- connection = BitbucketClient(access_token=node_addon.external_account.oauth_key)
+ connection = BitbucketClient(
+ access_token=node_addon.external_account.oauth_key
+ )
headers, data = connection.starball(
node_addon.user, node_addon.repo, archive, ref
)
@@ -156,20 +156,22 @@ def bitbucket_download_starball(node_addon, **kwargs):
return resp
+
#########
# HGrid #
#########
+
@must_be_contributor_or_public
-@must_have_addon('bitbucket', 'node')
+@must_have_addon("bitbucket", "node")
def bitbucket_root_folder(*args, **kwargs):
"""View function returning the root container for a Bitbucket repo. In
contrast to other add-ons, this is exposed via the API for Bitbucket to
accommodate switching between branches and commits.
"""
- node_settings = kwargs['node_addon']
- auth = kwargs['auth']
+ node_settings = kwargs["node_addon"]
+ auth = kwargs["auth"]
data = request.args.to_dict()
return bitbucket_hgrid_data(node_settings, auth=auth, **data)
diff --git a/addons/boa/apps.py b/addons/boa/apps.py
index bc0e4fa78a5..a43318e541a 100644
--- a/addons/boa/apps.py
+++ b/addons/boa/apps.py
@@ -3,34 +3,38 @@
from addons.base.apps import BaseAddonAppConfig
HERE = os.path.dirname(os.path.abspath(__file__))
-TEMPLATE_PATH = os.path.join(HERE, 'templates')
+TEMPLATE_PATH = os.path.join(HERE, "templates")
class BoaAddonAppConfig(BaseAddonAppConfig):
-
default = True
- name = 'addons.boa'
- label = 'addons_boa'
- full_name = 'Boa'
- short_name = 'boa'
- owners = ['user', 'node']
- configs = ['accounts', 'node']
- categories = ['remote-computing']
+ name = "addons.boa"
+ label = "addons_boa"
+ full_name = "Boa"
+ short_name = "boa"
+ owners = ["user", "node"]
+ configs = ["accounts", "node"]
+ categories = ["remote-computing"]
has_hgrid_files = False
- node_settings_template = os.path.join(TEMPLATE_PATH, 'boa_node_settings.mako')
- user_settings_template = os.path.join(TEMPLATE_PATH, 'boa_user_settings.mako')
+ node_settings_template = os.path.join(
+ TEMPLATE_PATH, "boa_node_settings.mako"
+ )
+ user_settings_template = os.path.join(
+ TEMPLATE_PATH, "boa_user_settings.mako"
+ )
actions = ()
@property
def routes(self):
from .routes import api_routes
+
return [api_routes]
@property
def user_settings(self):
- return self.get_model('UserSettings')
+ return self.get_model("UserSettings")
@property
def node_settings(self):
- return self.get_model('NodeSettings')
+ return self.get_model("NodeSettings")
diff --git a/addons/boa/boa_error_code.py b/addons/boa/boa_error_code.py
index f43f1665b92..c64197681ac 100644
--- a/addons/boa/boa_error_code.py
+++ b/addons/boa/boa_error_code.py
@@ -2,15 +2,18 @@
class BoaErrorCode(IntEnum):
- """Define 8 types of failures and errors (0~7) and 1 type for no error (-1) during Boa submit.
- """
+ """Define 8 types of failures and errors (0~7) and 1 type for no error (-1) during Boa submit."""
- NO_ERROR = -1 # No error
- UNKNOWN = 0 # Unexpected error from WB and/or Boa
- AUTHN_ERROR = 1 # Fail to authenticate with Boa
- QUERY_ERROR = 2 # Fail to compile or execute the Boa query
- UPLOAD_ERROR_CONFLICT = 3 # Fail to upload the output to OSF because file already exists
- UPLOAD_ERROR_OTHER = 4 # Fail to upload the output to OSF due to reasons other than ``UPLOAD_ERROR_CONFLICT``
- OUTPUT_ERROR = 5 # Fail to retrieve the output after Boa job has finished
- FILE_TOO_LARGE_ERROR = 6 # Fail to submit to Boa due to query file too large
- JOB_TIME_OUT_ERROR = 7 # Fail to finish Boa job due to time out
+ NO_ERROR = -1 # No error
+ UNKNOWN = 0 # Unexpected error from WB and/or Boa
+ AUTHN_ERROR = 1 # Fail to authenticate with Boa
+ QUERY_ERROR = 2 # Fail to compile or execute the Boa query
+ UPLOAD_ERROR_CONFLICT = (
+ 3 # Fail to upload the output to OSF because file already exists
+ )
+ UPLOAD_ERROR_OTHER = 4 # Fail to upload the output to OSF due to reasons other than ``UPLOAD_ERROR_CONFLICT``
+ OUTPUT_ERROR = 5 # Fail to retrieve the output after Boa job has finished
+ FILE_TOO_LARGE_ERROR = (
+ 6 # Fail to submit to Boa due to query file too large
+ )
+ JOB_TIME_OUT_ERROR = 7 # Fail to finish Boa job due to time out
diff --git a/addons/boa/models.py b/addons/boa/models.py
index 01578a89bdc..0f5faf99d57 100644
--- a/addons/boa/models.py
+++ b/addons/boa/models.py
@@ -1,7 +1,11 @@
from django.db import models
from addons.base.exceptions import NotApplicableError
-from addons.base.models import BaseOAuthNodeSettings, BaseOAuthUserSettings, BaseStorageAddon
+from addons.base.models import (
+ BaseOAuthNodeSettings,
+ BaseOAuthUserSettings,
+ BaseStorageAddon,
+)
from addons.boa.serializer import BoaSerializer
from addons.boa.settings import DEFAULT_HOSTS
from framework.auth import Auth
@@ -11,39 +15,41 @@
class BoaProvider(BasicAuthProviderMixin):
"""Boa provider, an alternative to `ExternalProvider` which is not tied to OAuth"""
- name = 'Boa'
- short_name = 'boa'
+ name = "Boa"
+ short_name = "boa"
def __init__(self, account=None, host=None, username=None, password=None):
if username:
username = username.lower()
- super().__init__(account=account, host=host, username=username, password=password)
+ super().__init__(
+ account=account, host=host, username=username, password=password
+ )
def __repr__(self):
- return '<{name}: {status}>'.format(
+ return "<{name}: {status}>".format(
name=self.__class__.__name__,
- status=self.account.display_name if self.account else 'anonymous'
+ status=self.account.display_name if self.account else "anonymous",
)
class UserSettings(BaseOAuthUserSettings):
-
oauth_provider = BoaProvider
serializer = BoaSerializer
def to_json(self, user):
ret = super().to_json(user)
- ret['hosts'] = DEFAULT_HOSTS
+ ret["hosts"] = DEFAULT_HOSTS
return ret
class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
-
oauth_provider = BoaProvider
serializer = BoaSerializer
folder_id = models.TextField(blank=True, null=True)
- user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
+ user_settings = models.ForeignKey(
+ UserSettings, null=True, blank=True, on_delete=models.CASCADE
+ )
_api = None
@@ -66,9 +72,9 @@ def set_folder(self, folder, auth=None):
raise NotApplicableError
def fetch_folder_name(self):
- if self.folder_id == '/':
- return '/ (Full Boa)'
- return self.folder_id.strip('/').split('/')[-1]
+ if self.folder_id == "/":
+ return "/ (Full Boa)"
+ return self.folder_id.strip("/").split("/")[-1]
def clear_settings(self):
self.folder_id = None
@@ -77,7 +83,7 @@ def deauthorize(self, auth=None, add_log=True):
"""Remove user authorization from this node and log the event."""
self.clear_settings()
if add_log:
- self.nodelogger.log(action='node_deauthorized')
+ self.nodelogger.log(action="node_deauthorized")
self.clear_auth() # Also performs a .save()
def serialize_waterbutler_credentials(self):
diff --git a/addons/boa/routes.py b/addons/boa/routes.py
index 0bc818e1925..4ba47c3a137 100644
--- a/addons/boa/routes.py
+++ b/addons/boa/routes.py
@@ -2,57 +2,55 @@
from framework.routing import Rule, json_renderer
api_routes = {
- 'rules': [
+ "rules": [
Rule(
[
- '/project//boa/user-auth/',
- '/project//node//boa/user-auth/',
+ "/project//boa/user-auth/",
+ "/project//node//boa/user-auth/",
],
- 'delete',
+ "delete",
views.boa_deauthorize_node,
json_renderer,
),
Rule(
- '/settings/boa/accounts/',
- 'get',
+ "/settings/boa/accounts/",
+ "get",
views.boa_account_list,
json_renderer,
),
Rule(
[
- '/project//boa/settings/',
- '/project//node//boa/settings/'
+ "/project//boa/settings/",
+ "/project//node//boa/settings/",
],
- 'get',
+ "get",
views.boa_get_config,
- json_renderer
+ json_renderer,
),
Rule(
- [
- '/settings/boa/accounts/'
- ],
- 'post',
+ ["/settings/boa/accounts/"],
+ "post",
views.boa_add_user_account,
- json_renderer
+ json_renderer,
),
Rule(
[
- '/project//boa/user-auth/',
- '/project//node//boa/user-auth/',
+ "/project//boa/user-auth/",
+ "/project//node//boa/user-auth/",
],
- 'put',
+ "put",
views.boa_import_auth,
- json_renderer
+ json_renderer,
),
Rule(
[
- '/project//boa/submit-job/',
- '/project//node//boa/submit-job/',
+ "/project//boa/submit-job/",
+ "/project//node//boa/submit-job/",
],
- 'post',
+ "post",
views.boa_submit_job,
- json_renderer
+ json_renderer,
),
],
- 'prefix': '/api/v1'
+ "prefix": "/api/v1",
}
diff --git a/addons/boa/serializer.py b/addons/boa/serializer.py
index 42c738e62a8..1e2144ae08c 100644
--- a/addons/boa/serializer.py
+++ b/addons/boa/serializer.py
@@ -8,8 +8,7 @@
class BoaSerializer(StorageAddonSerializer):
-
- addon_short_name = 'boa'
+ addon_short_name = "boa"
def serialized_folder(self, node_settings):
"""Not applicable to remote computing add-ons"""
@@ -17,7 +16,9 @@ def serialized_folder(self, node_settings):
def credentials_are_valid(self, user_settings, client=None):
if client is not None:
- sentry.log_message('Client ignored for Boa Serializer in credentials_are_valid()')
+ sentry.log_message(
+ "Client ignored for Boa Serializer in credentials_are_valid()"
+ )
external_account = self.node_settings.external_account
if external_account is None:
return False
@@ -33,38 +34,43 @@ def credentials_are_valid(self, user_settings, client=None):
@property
def addon_serialized_urls(self):
-
node = self.node_settings.owner
user_settings = self.node_settings.user_settings or self.user_settings
result = {
- 'auth': node.api_url_for('boa_add_user_account'),
- 'accounts': node.api_url_for('boa_account_list'),
- 'importAuth': node.api_url_for('boa_import_auth'),
- 'deauthorize': node.api_url_for('boa_deauthorize_node'),
- 'folders': None,
- 'files': None,
- 'config': None,
+ "auth": node.api_url_for("boa_add_user_account"),
+ "accounts": node.api_url_for("boa_account_list"),
+ "importAuth": node.api_url_for("boa_import_auth"),
+ "deauthorize": node.api_url_for("boa_deauthorize_node"),
+ "folders": None,
+ "files": None,
+ "config": None,
}
if user_settings:
- result['owner'] = web_url_for('profile_view_id', uid=user_settings.owner._id)
+ result["owner"] = web_url_for(
+ "profile_view_id", uid=user_settings.owner._id
+ )
return result
@property
def serialized_node_settings(self):
result = super().serialized_node_settings
- result['hosts'] = DEFAULT_HOSTS
+ result["hosts"] = DEFAULT_HOSTS
return result
@property
def serialized_user_settings(self):
result = super().serialized_user_settings
- result['hosts'] = DEFAULT_HOSTS
+ result["hosts"] = DEFAULT_HOSTS
return result
def serialize_settings(self, node_settings, current_user, client=None):
if client is not None:
- sentry.log_message('Client ignored for Boa Serializer in serialize_settings()')
- ret = super().serialize_settings(node_settings, current_user, client=client)
- ret['hosts'] = DEFAULT_HOSTS
+ sentry.log_message(
+ "Client ignored for Boa Serializer in serialize_settings()"
+ )
+ ret = super().serialize_settings(
+ node_settings, current_user, client=client
+ )
+ ret["hosts"] = DEFAULT_HOSTS
return ret
diff --git a/addons/boa/settings/__init__.py b/addons/boa/settings/__init__.py
index 6deafee2808..3d72f91216a 100644
--- a/addons/boa/settings/__init__.py
+++ b/addons/boa/settings/__init__.py
@@ -7,4 +7,4 @@
try:
from addons.boa.settings.local import * # noqa
except ImportError:
- logger.warning('No local.py settings file found')
+ logger.warning("No local.py settings file found")
diff --git a/addons/boa/settings/defaults.py b/addons/boa/settings/defaults.py
index 0422ba1928d..d66a689d102 100644
--- a/addons/boa/settings/defaults.py
+++ b/addons/boa/settings/defaults.py
@@ -11,28 +11,28 @@
REFRESH_JOB_INTERVAL = 10 # 10 seconds
# Suffix to replace '.boa' for the output file
-OUTPUT_FILE_SUFFIX = '_results.txt'
+OUTPUT_FILE_SUFFIX = "_results.txt"
BOA_DATASETS = [
- '2022 Jan/Java',
- '2022 Feb/Python',
- '2021 Method Chains',
- '2021 Aug/Python',
- '2021 Aug/Kotlin (small)',
- '2021 Aug/Kotlin',
- '2021 Jan/ML-Verse',
- '2020 August/Python-DS',
- '2019 October/GitHub (small)',
- '2019 October/GitHub (medium)',
- '2019 October/GitHub',
- '2015 September/GitHub',
- '2013 September/SF (small)',
- '2013 September/SF (medium)',
- '2013 September/SF',
- '2013 May/SF',
- '2013 February/SF',
- '2012 July/SF',
+ "2022 Jan/Java",
+ "2022 Feb/Python",
+ "2021 Method Chains",
+ "2021 Aug/Python",
+ "2021 Aug/Kotlin (small)",
+ "2021 Aug/Kotlin",
+ "2021 Jan/ML-Verse",
+ "2020 August/Python-DS",
+ "2019 October/GitHub (small)",
+ "2019 October/GitHub (medium)",
+ "2019 October/GitHub",
+ "2015 September/GitHub",
+ "2013 September/SF (small)",
+ "2013 September/SF (medium)",
+ "2013 September/SF",
+ "2013 May/SF",
+ "2013 February/SF",
+ "2012 July/SF",
]
-BOA_JOB_LIST_URL = 'https://boa.cs.iastate.edu/boa/index.php?q=boa/jobs'
-BOA_SUPPORT_EMAIL = 'boasupport@iastate.edu'
+BOA_JOB_LIST_URL = "https://boa.cs.iastate.edu/boa/index.php?q=boa/jobs"
+BOA_SUPPORT_EMAIL = "boasupport@iastate.edu"
diff --git a/addons/boa/tasks.py b/addons/boa/tasks.py
index a64110e69b5..ba335aa7c39 100644
--- a/addons/boa/tasks.py
+++ b/addons/boa/tasks.py
@@ -17,15 +17,29 @@
from osf.models import OSFUser
from osf.utils.fields import ensure_str, ensure_bytes
from website import settings as osf_settings
-from website.mails import send_mail, ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE
+from website.mails import (
+ send_mail,
+ ADDONS_BOA_JOB_COMPLETE,
+ ADDONS_BOA_JOB_FAILURE,
+)
logger = logging.getLogger(__name__)
-@celery_app.task(name='addons.boa.tasks.submit_to_boa')
-def submit_to_boa(host, username, password, user_guid, project_guid,
- query_dataset, query_file_name, file_size, file_full_path,
- query_download_url, output_upload_url):
+@celery_app.task(name="addons.boa.tasks.submit_to_boa")
+def submit_to_boa(
+ host,
+ username,
+ password,
+ user_guid,
+ project_guid,
+ query_dataset,
+ query_file_name,
+ file_size,
+ file_full_path,
+ query_download_url,
+ output_upload_url,
+):
"""
Download Boa query file, submit it to Boa API, wait for Boa to finish the job
and upload result output to OSF. Send success / failure emails notifications.
@@ -38,14 +52,34 @@ def submit_to_boa(host, username, password, user_guid, project_guid,
* Running asyncio in celery is tricky. Refer to the discussion below for details:
* https://stackoverflow.com/questions/39815771/how-to-combine-celery-with-asyncio
"""
- return async_to_sync(submit_to_boa_async)(host, username, password, user_guid, project_guid,
- query_dataset, query_file_name, file_size, file_full_path,
- query_download_url, output_upload_url)
+ return async_to_sync(submit_to_boa_async)(
+ host,
+ username,
+ password,
+ user_guid,
+ project_guid,
+ query_dataset,
+ query_file_name,
+ file_size,
+ file_full_path,
+ query_download_url,
+ output_upload_url,
+ )
-async def submit_to_boa_async(host, username, password, user_guid, project_guid,
- query_dataset, query_file_name, file_size, file_full_path,
- query_download_url, output_upload_url):
+async def submit_to_boa_async(
+ host,
+ username,
+ password,
+ user_guid,
+ project_guid,
+ query_dataset,
+ query_file_name,
+ file_size,
+ file_full_path,
+ query_download_url,
+ output_upload_url,
+):
"""
Download Boa query file, submit it to Boa API, wait for Boa to finish the job
and upload result output to OSF. Send success / failure emails notifications.
@@ -55,135 +89,240 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid,
* See notes in ``submit_to_boa()`` for details.
"""
- logger.debug('>>>>>>>> Task begins')
+ logger.debug(">>>>>>>> Task begins")
user = await sync_to_async(OSFUser.objects.get)(guids___id=user_guid)
cookie_value = (await sync_to_async(user.get_or_create_cookie)()).decode()
- project_url = f'{osf_settings.DOMAIN}{project_guid}/'
- output_file_name = query_file_name.replace('.boa', boa_settings.OUTPUT_FILE_SUFFIX)
+ project_url = f"{osf_settings.DOMAIN}{project_guid}/"
+ output_file_name = query_file_name.replace(
+ ".boa", boa_settings.OUTPUT_FILE_SUFFIX
+ )
if file_size > boa_settings.MAX_SUBMISSION_SIZE:
- message = f'Boa query file too large to submit: user=[{user_guid}], project=[{project_guid}], ' \
- f'file_name=[{query_file_name}], file_size=[{file_size}], ' \
- f'full_path=[{file_full_path}], url=[{query_download_url}] ...'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.FILE_TOO_LARGE_ERROR,
- user.username, user.fullname, project_url, file_full_path,
- query_file_name=query_file_name, file_size=file_size)
+ message = (
+ f"Boa query file too large to submit: user=[{user_guid}], project=[{project_guid}], "
+ f"file_name=[{query_file_name}], file_size=[{file_size}], "
+ f"full_path=[{file_full_path}], url=[{query_download_url}] ..."
+ )
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.FILE_TOO_LARGE_ERROR,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ file_size=file_size,
+ )
return BoaErrorCode.FILE_TOO_LARGE_ERROR
- logger.debug(f'Downloading Boa query file: user=[{user_guid}], project=[{project_guid}], '
- f'file_name=[{query_file_name}], full_path=[{file_full_path}], url=[{query_download_url}] ...')
+ logger.debug(
+ f"Downloading Boa query file: user=[{user_guid}], project=[{project_guid}], "
+ f"file_name=[{query_file_name}], full_path=[{file_full_path}], url=[{query_download_url}] ..."
+ )
download_request = request.Request(query_download_url)
- download_request.add_header('Cookie', f'{osf_settings.COOKIE_NAME}={cookie_value}')
+ download_request.add_header(
+ "Cookie", f"{osf_settings.COOKIE_NAME}={cookie_value}"
+ )
try:
boa_query = ensure_str(request.urlopen(download_request).read())
except (ValueError, HTTPError, URLError, HTTPException):
- message = f'Failed to download Boa query file: user=[{user_guid}], project=[{project_guid}], ' \
- f'file_name=[{query_file_name}], full_path=[{file_full_path}], url=[{query_download_url}] ...'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.UNKNOWN, user.username, user.fullname,
- project_url, file_full_path, query_file_name=query_file_name)
+ message = (
+ f"Failed to download Boa query file: user=[{user_guid}], project=[{project_guid}], "
+ f"file_name=[{query_file_name}], full_path=[{file_full_path}], url=[{query_download_url}] ..."
+ )
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.UNKNOWN,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ )
return BoaErrorCode.UNKNOWN
- logger.info('Boa query successfully downloaded.')
- logger.debug(f'Boa query:\n########\n{boa_query}\n########')
+ logger.info("Boa query successfully downloaded.")
+ logger.debug(f"Boa query:\n########\n{boa_query}\n########")
- logger.debug('Boa client opened.')
+ logger.debug("Boa client opened.")
client = BoaClient(endpoint=host)
- logger.debug(f'Checking Boa credentials: boa_username=[{username}], boa_host=[{host}] ...')
+ logger.debug(
+ f"Checking Boa credentials: boa_username=[{username}], boa_host=[{host}] ..."
+ )
try:
client.login(username, password)
except BoaException:
# Don't call `client.close()`, since it will fail with `BoaException` if `client.login()` fails
- message = f'Boa login failed: boa_username=[{username}], boa_host=[{host}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.AUTHN_ERROR, user.username, user.fullname,
- project_url, file_full_path, query_file_name=query_file_name)
+ message = (
+ f"Boa login failed: boa_username=[{username}], boa_host=[{host}]!"
+ )
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.AUTHN_ERROR,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ )
return BoaErrorCode.AUTHN_ERROR
- logger.info('Boa login completed.')
+ logger.info("Boa login completed.")
- logger.debug(f'Retrieving Boa dataset: dataset=[{query_dataset}] ...')
+ logger.debug(f"Retrieving Boa dataset: dataset=[{query_dataset}] ...")
try:
dataset = client.get_dataset(query_dataset)
except BoaException:
client.close()
- message = f'Failed to retrieve or verify the target Boa dataset: dataset=[{query_dataset}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.UNKNOWN, user.username, user.fullname,
- project_url, file_full_path, query_file_name=query_file_name)
+ message = f"Failed to retrieve or verify the target Boa dataset: dataset=[{query_dataset}]!"
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.UNKNOWN,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ )
return BoaErrorCode.UNKNOWN
- logger.info('Boa dataset retrieved.')
+ logger.info("Boa dataset retrieved.")
- logger.debug(f'Submitting the query to Boa API: boa_host=[{host}], dataset=[{query_dataset}] ...')
+ logger.debug(
+ f"Submitting the query to Boa API: boa_host=[{host}], dataset=[{query_dataset}] ..."
+ )
try:
boa_job = client.query(boa_query, dataset)
start_time = time.time()
except BoaException:
client.close()
- message = f'Failed to submit the query to Boa API: : boa_host=[{host}], dataset=[{query_dataset}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.UNKNOWN, user.username, user.fullname,
- project_url, file_full_path, query_file_name=query_file_name)
+ message = f"Failed to submit the query to Boa API: : boa_host=[{host}], dataset=[{query_dataset}]!"
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.UNKNOWN,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ )
return BoaErrorCode.UNKNOWN
- logger.info('Query successfully submitted.')
- logger.debug(f'Waiting for job to finish: job_id=[{str(boa_job.id)}] ...')
+ logger.info("Query successfully submitted.")
+ logger.debug(f"Waiting for job to finish: job_id=[{str(boa_job.id)}] ...")
while boa_job.is_running():
if time.time() - start_time > boa_settings.MAX_JOB_WAITING_TIME:
client.close()
- message = f'Boa job did not complete in time: job_id=[{str(boa_job.id)}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.JOB_TIME_OUT_ERROR,
- user.username, user.fullname, project_url, file_full_path,
- query_file_name=query_file_name, job_id=boa_job.id)
+ message = f"Boa job did not complete in time: job_id=[{str(boa_job.id)}]!"
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.JOB_TIME_OUT_ERROR,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ job_id=boa_job.id,
+ )
return BoaErrorCode.JOB_TIME_OUT_ERROR
- logger.debug(f'Boa job still running, waiting 10s: job_id=[{str(boa_job.id)}] ...')
+ logger.debug(
+ f"Boa job still running, waiting 10s: job_id=[{str(boa_job.id)}] ..."
+ )
boa_job.refresh()
await asyncio.sleep(boa_settings.REFRESH_JOB_INTERVAL)
if boa_job.compiler_status is CompilerStatus.ERROR:
client.close()
- message = f'Boa job failed with compile error: job_id=[{str(boa_job.id)}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.QUERY_ERROR, user.username,
- user.fullname, project_url, file_full_path,
- query_file_name=query_file_name, job_id=boa_job.id)
+ message = (
+ f"Boa job failed with compile error: job_id=[{str(boa_job.id)}]!"
+ )
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.QUERY_ERROR,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ job_id=boa_job.id,
+ )
return BoaErrorCode.QUERY_ERROR
elif boa_job.exec_status is ExecutionStatus.ERROR:
client.close()
- message = f'Boa job failed with execution error: job_id=[{str(boa_job.id)}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.QUERY_ERROR, user.username,
- user.fullname, project_url, file_full_path,
- query_file_name=query_file_name, job_id=boa_job.id)
+ message = (
+ f"Boa job failed with execution error: job_id=[{str(boa_job.id)}]!"
+ )
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.QUERY_ERROR,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ job_id=boa_job.id,
+ )
return BoaErrorCode.QUERY_ERROR
else:
try:
boa_job_output = boa_job.output()
except BoaException:
client.close()
- message = f'Boa job output is not available: job_id=[{str(boa_job.id)}]!'
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.OUTPUT_ERROR, user.username,
- user.fullname, project_url, file_full_path,
- query_file_name=query_file_name, job_id=boa_job.id)
+ message = (
+ f"Boa job output is not available: job_id=[{str(boa_job.id)}]!"
+ )
+ await sync_to_async(handle_boa_error)(
+ message,
+ BoaErrorCode.OUTPUT_ERROR,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ job_id=boa_job.id,
+ )
return BoaErrorCode.OUTPUT_ERROR
- logger.info('Boa job finished.')
- logger.debug(f'Boa job output: job_id=[{str(boa_job.id)}]\n########\n{boa_job_output}\n########')
+ logger.info("Boa job finished.")
+ logger.debug(
+ f"Boa job output: job_id=[{str(boa_job.id)}]\n########\n{boa_job_output}\n########"
+ )
client.close()
- logger.debug('Boa client closed.')
+ logger.debug("Boa client closed.")
- logger.debug(f'Uploading Boa query output to OSF: name=[{output_file_name}], upload_url=[{output_upload_url}] ...')
+ logger.debug(
+ f"Uploading Boa query output to OSF: name=[{output_file_name}], upload_url=[{output_upload_url}] ..."
+ )
try:
- output_query_param = urlencode({'name': output_file_name})
- upload_request = request.Request(f'{output_upload_url}&{output_query_param}')
- upload_request.method = 'PUT'
+ output_query_param = urlencode({"name": output_file_name})
+ upload_request = request.Request(
+ f"{output_upload_url}&{output_query_param}"
+ )
+ upload_request.method = "PUT"
upload_request.data = ensure_bytes(boa_job_output)
- upload_request.add_header('Cookie', f'{osf_settings.COOKIE_NAME}={cookie_value}')
+ upload_request.add_header(
+ "Cookie", f"{osf_settings.COOKIE_NAME}={cookie_value}"
+ )
request.urlopen(upload_request)
except (ValueError, HTTPError, URLError, HTTPException) as e:
- message = f'Failed to upload query output file to OSF: ' \
- f'name=[{output_file_name}], user=[{user_guid}], url=[{output_upload_url}]!'
+ message = (
+ f"Failed to upload query output file to OSF: "
+ f"name=[{output_file_name}], user=[{user_guid}], url=[{output_upload_url}]!"
+ )
error_code = BoaErrorCode.UPLOAD_ERROR_OTHER
if isinstance(e, HTTPError):
- message += f', http_error=[{e.code}: {e.reason}]'
+ message += f", http_error=[{e.code}: {e.reason}]"
if e.code == 409:
error_code = BoaErrorCode.UPLOAD_ERROR_CONFLICT
- await sync_to_async(handle_boa_error)(message, error_code, user.username, user.fullname, project_url,
- file_full_path, query_file_name=query_file_name,
- output_file_name=output_file_name, job_id=boa_job.id)
+ await sync_to_async(handle_boa_error)(
+ message,
+ error_code,
+ user.username,
+ user.fullname,
+ project_url,
+ file_full_path,
+ query_file_name=query_file_name,
+ output_file_name=output_file_name,
+ job_id=boa_job.id,
+ )
return error_code
- logger.info('Successfully uploaded query output to OSF.')
- logger.debug('Task ends <<<<<<<<')
+ logger.info("Successfully uploaded query output to OSF.")
+ logger.debug("Task ends <<<<<<<<")
await sync_to_async(send_mail)(
to_addr=user.username,
mail=ADDONS_BOA_JOB_COMPLETE,
@@ -200,10 +339,19 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid,
return BoaErrorCode.NO_ERROR
-def handle_boa_error(message, code, username, fullname, project_url, query_file_full_path,
- query_file_name=None, file_size=None, output_file_name=None, job_id=None):
- """Handle Boa and WB API errors and send emails.
- """
+def handle_boa_error(
+ message,
+ code,
+ username,
+ fullname,
+ project_url,
+ query_file_full_path,
+ query_file_name=None,
+ file_size=None,
+ output_file_name=None,
+ job_id=None,
+):
+ """Handle Boa and WB API errors and send emails."""
logger.error(message)
try:
sentry.log_message(message, skip_session=True)
diff --git a/addons/boa/tests/factories.py b/addons/boa/tests/factories.py
index 6be5ccc2f42..42722ff45ae 100644
--- a/addons/boa/tests/factories.py
+++ b/addons/boa/tests/factories.py
@@ -2,26 +2,28 @@
from factory.django import DjangoModelFactory
from addons.boa.models import UserSettings, NodeSettings
-from osf_tests.factories import UserFactory, ProjectFactory, ExternalAccountFactory
+from osf_tests.factories import (
+ UserFactory,
+ ProjectFactory,
+ ExternalAccountFactory,
+)
-BOA_HOST = 'http://localhost:9999/boa/?q=boa/api'
-BOA_USERNAME = 'fake-boa-username'
-BOA_PASSWORD = 'fake-boa-password'
+BOA_HOST = "http://localhost:9999/boa/?q=boa/api"
+BOA_USERNAME = "fake-boa-username"
+BOA_PASSWORD = "fake-boa-password"
class BoaAccountFactory(ExternalAccountFactory):
-
- provider = 'boa'
- provider_name = 'Fake Boa Provider'
- provider_id = Sequence(lambda n: f'{BOA_HOST}:{BOA_USERNAME}-{n}')
- profile_url = Sequence(lambda n: f'http://localhost:9999/{n}/boa')
- oauth_secret = Sequence(lambda n: f'secret-{n}')
+ provider = "boa"
+ provider_name = "Fake Boa Provider"
+ provider_id = Sequence(lambda n: f"{BOA_HOST}:{BOA_USERNAME}-{n}")
+ profile_url = Sequence(lambda n: f"http://localhost:9999/{n}/boa")
+ oauth_secret = Sequence(lambda n: f"secret-{n}")
oauth_key = BOA_PASSWORD
- display_name = 'Fake Boa'
+ display_name = "Fake Boa"
class BoaUserSettingsFactory(DjangoModelFactory):
-
class Meta:
model = UserSettings
@@ -29,7 +31,6 @@ class Meta:
class BoaNodeSettingsFactory(DjangoModelFactory):
-
class Meta:
model = NodeSettings
diff --git a/addons/boa/tests/test_models.py b/addons/boa/tests/test_models.py
index 57e08072d30..e9e35132d64 100644
--- a/addons/boa/tests/test_models.py
+++ b/addons/boa/tests/test_models.py
@@ -2,7 +2,10 @@
import unittest
from addons.base.exceptions import NotApplicableError
-from addons.base.tests.models import OAuthAddonNodeSettingsTestSuiteMixin, OAuthAddonUserSettingTestSuiteMixin
+from addons.base.tests.models import (
+ OAuthAddonNodeSettingsTestSuiteMixin,
+ OAuthAddonUserSettingTestSuiteMixin,
+)
from addons.boa.tests.utils import BoaAddonTestCaseBaseMixin
from framework.auth import Auth
from osf.models import NodeLog
@@ -10,16 +13,24 @@
pytestmark = pytest.mark.django_db
-class TestUserSettings(BoaAddonTestCaseBaseMixin, OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
-
+class TestUserSettings(
+ BoaAddonTestCaseBaseMixin,
+ OAuthAddonUserSettingTestSuiteMixin,
+ unittest.TestCase,
+):
pass
-class TestNodeSettings(BoaAddonTestCaseBaseMixin, OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
-
+class TestNodeSettings(
+ BoaAddonTestCaseBaseMixin,
+ OAuthAddonNodeSettingsTestSuiteMixin,
+ unittest.TestCase,
+):
def test_set_folder(self):
with pytest.raises(NotApplicableError):
- self.node_settings.set_folder('fake_folder_id', auth=Auth(self.user))
+ self.node_settings.set_folder(
+ "fake_folder_id", auth=Auth(self.user)
+ )
def test_create_log(self):
with pytest.raises(NotApplicableError):
@@ -27,9 +38,9 @@ def test_create_log(self):
auth=Auth(user=self.user),
action=NodeLog.FILE_ADDED,
metadata={
- 'path': 'fake_path',
- 'materialized': 'fake_materialized_path',
- }
+ "path": "fake_path",
+ "materialized": "fake_materialized_path",
+ },
)
def test_serialize_credentials(self):
diff --git a/addons/boa/tests/test_serializer.py b/addons/boa/tests/test_serializer.py
index 058ed018533..7110e8b7ab2 100644
--- a/addons/boa/tests/test_serializer.py
+++ b/addons/boa/tests/test_serializer.py
@@ -9,13 +9,18 @@
pytestmark = pytest.mark.django_db
-class TestBoaSerializer(BoaAddonTestCaseBaseMixin, StorageAddonSerializerTestSuiteMixin, OsfTestCase):
-
+class TestBoaSerializer(
+ BoaAddonTestCaseBaseMixin,
+ StorageAddonSerializerTestSuiteMixin,
+ OsfTestCase,
+):
def set_provider_id(self, pid=None):
self.node_settings.folder_id = pid
def setUp(self):
- self.mock_credentials = mock.patch('addons.boa.serializer.BoaSerializer.credentials_are_valid')
+ self.mock_credentials = mock.patch(
+ "addons.boa.serializer.BoaSerializer.credentials_are_valid"
+ )
self.mock_credentials.return_value = True
self.mock_credentials.start()
super().setUp()
@@ -25,7 +30,11 @@ def tearDown(self):
super().tearDown()
def test_serialize_settings_authorized_folder_is_set(self):
- self.set_provider_id(pid='foo')
+ self.set_provider_id(pid="foo")
with pytest.raises(NotApplicableError):
- with mock.patch.object(type(self.node_settings), 'has_auth', return_value=True):
- _ = self.ser.serialize_settings(self.node_settings, self.user, self.client)
+ with mock.patch.object(
+ type(self.node_settings), "has_auth", return_value=True
+ ):
+ _ = self.ser.serialize_settings(
+ self.node_settings, self.user, self.client
+ )
diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py
index a4842d6c417..164aaf38eb4 100644
--- a/addons/boa/tests/test_tasks.py
+++ b/addons/boa/tests/test_tasks.py
@@ -8,7 +8,11 @@
from addons.boa import settings as boa_settings
from addons.boa.boa_error_code import BoaErrorCode
-from addons.boa.tasks import submit_to_boa, submit_to_boa_async, handle_boa_error
+from addons.boa.tasks import (
+ submit_to_boa,
+ submit_to_boa_async,
+ handle_boa_error,
+)
from osf_tests.factories import AuthUserFactory, ProjectFactory
from tests.base import OsfTestCase
from website import settings as osf_settings
@@ -24,19 +28,18 @@ async def __call__(self, *args, **kwargs):
class TestBoaErrorHandling(OsfTestCase):
-
def setUp(self):
super().setUp()
- self.error_message = 'fake-error-message'
- self.user_username = 'fake-user-username'
- self.user_fullname = 'fake-user-fullname'
- self.project_url = 'http://localhost:5000/1a2b3'
- self.query_file_name = 'fake_boa_script.boa'
+ self.error_message = "fake-error-message"
+ self.user_username = "fake-user-username"
+ self.user_fullname = "fake-user-fullname"
+ self.project_url = "http://localhost:5000/1a2b3"
+ self.query_file_name = "fake_boa_script.boa"
self.file_size = 255
self.max_job_wait_hours = boa_settings.MAX_JOB_WAITING_TIME / 3600
- self.file_full_path = '/fake_boa_folder/fake_boa_script.boa'
- self.output_file_name = 'fake_boa_script_results.txt'
- self.job_id = '1a2b3c4d5e6f7g8'
+ self.file_full_path = "/fake_boa_folder/fake_boa_script.boa"
+ self.output_file_name = "fake_boa_script_results.txt"
+ self.job_id = "1a2b3c4d5e6f7g8"
def tearDown(self):
super().tearDown()
@@ -53,9 +56,13 @@ def test_boa_error_code(self):
assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7
def test_handle_boa_error(self):
- with mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \
- mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \
- mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error:
+ with mock.patch(
+ "addons.boa.tasks.send_mail", return_value=None
+ ) as mock_send_mail, mock.patch(
+ "addons.boa.tasks.sentry.log_message", return_value=None
+ ) as mock_sentry_log_message, mock.patch(
+ "addons.boa.tasks.logger.error", return_value=None
+ ) as mock_logger_error:
return_value = handle_boa_error(
self.error_message,
BoaErrorCode.UNKNOWN,
@@ -66,7 +73,7 @@ def test_handle_boa_error(self):
query_file_name=self.query_file_name,
file_size=self.file_size,
output_file_name=self.output_file_name,
- job_id=self.job_id
+ job_id=self.job_id,
)
mock_send_mail.assert_called_with(
to_addr=self.user_username,
@@ -86,35 +93,36 @@ def test_handle_boa_error(self):
boa_support_email=boa_settings.BOA_SUPPORT_EMAIL,
osf_support_email=osf_settings.OSF_SUPPORT_EMAIL,
)
- mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True)
+ mock_sentry_log_message.assert_called_with(
+ self.error_message, skip_session=True
+ )
mock_logger_error.assert_called_with(self.error_message)
assert return_value == BoaErrorCode.UNKNOWN
class TestSubmitToBoa(OsfTestCase):
-
def setUp(self):
super().setUp()
- self.host = 'http://locahost:9999/boa/?q=boa/api'
- self.username = 'fake-boa-username'
- self.password = 'fake-boa-password'
+ self.host = "http://locahost:9999/boa/?q=boa/api"
+ self.username = "fake-boa-username"
+ self.password = "fake-boa-password"
self.user_guid = AuthUserFactory()._id
self.project_guid = ProjectFactory()._id
- self.query_dataset = '2023 Oct / Fake Boa Dataset (small)'
- self.query_file_name = 'fake_boa_script.boa'
+ self.query_dataset = "2023 Oct / Fake Boa Dataset (small)"
+ self.query_file_name = "fake_boa_script.boa"
self.file_size = 255
- self.file_full_path = '/fake_boa_folder/fake_boa_script.boa'
- self.query_download_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/1a2b3c4d'
- self.output_upload_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/?kind=file'
+ self.file_full_path = "/fake_boa_folder/fake_boa_script.boa"
+ self.query_download_url = f"http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/1a2b3c4d"
+ self.output_upload_url = f"http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/?kind=file"
def tearDown(self):
super().tearDown()
def test_submit_to_boa_async_called(self):
with mock.patch(
- 'addons.boa.tasks.submit_to_boa_async',
- new_callable=AsyncMock,
- return_value=BoaErrorCode.NO_ERROR
+ "addons.boa.tasks.submit_to_boa_async",
+ new_callable=AsyncMock,
+ return_value=BoaErrorCode.NO_ERROR,
) as mock_submit_to_boa_async:
return_value = submit_to_boa(
self.host,
@@ -127,7 +135,7 @@ def test_submit_to_boa_async_called(self):
self.file_size,
self.file_full_path,
self.query_download_url,
- self.output_upload_url
+ self.output_upload_url,
)
assert return_value == BoaErrorCode.NO_ERROR
mock_submit_to_boa_async.assert_called()
@@ -136,34 +144,33 @@ def test_submit_to_boa_async_called(self):
@pytest.mark.django_db
@pytest.mark.asyncio
class TestSubmitToBoaAsync(OsfTestCase):
-
def setUp(self):
super().setUp()
- self.host = 'http://locahost:9999/boa/?q=boa/api'
- self.username = 'fake-boa-username'
- self.password = 'fake-boa-password'
+ self.host = "http://locahost:9999/boa/?q=boa/api"
+ self.username = "fake-boa-username"
+ self.password = "fake-boa-password"
self.user = AuthUserFactory()
self.user_guid = self.user._id
self.user_cookie = self.user.get_or_create_cookie()
self.project_guid = ProjectFactory()._id
- self.project_url = f'{osf_settings.DOMAIN}{self.project_guid}/'
- self.query_dataset = '2023 Oct / Fake Boa Dataset (small)'
- self.query_file_name = 'fake_boa_script.boa'
+ self.project_url = f"{osf_settings.DOMAIN}{self.project_guid}/"
+ self.query_dataset = "2023 Oct / Fake Boa Dataset (small)"
+ self.query_file_name = "fake_boa_script.boa"
self.file_size = 255
self.file_size_too_large = boa_settings.MAX_SUBMISSION_SIZE + 255
- self.output_file_name = 'fake_boa_script_results.txt'
- self.file_full_path = '/fake_boa_folder/fake_boa_script.boa'
- self.query_download_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/1a2b3c4d'
- self.output_upload_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/?kind=file'
+ self.output_file_name = "fake_boa_script_results.txt"
+ self.file_full_path = "/fake_boa_folder/fake_boa_script.boa"
+ self.query_download_url = f"http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/1a2b3c4d"
+ self.output_upload_url = f"http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/?kind=file"
self.mock_resp = mock.Mock()
- self.mock_resp.read.return_value = 'fake-boa-query-string'
+ self.mock_resp.read.return_value = "fake-boa-query-string"
self.mock_job = mock.Mock()
- self.mock_job.id = '1a2b3c4d5e6f7g8'
+ self.mock_job.id = "1a2b3c4d5e6f7g8"
self.mock_job.is_running.side_effect = [True, True, True, True, False]
self.mock_job.refresh.return_value = None
self.mock_job.compiler_status = CompilerStatus.FINISHED
self.mock_job.exec_status = ExecutionStatus.FINISHED
- self.mock_job.output.return_value = 'fake-boa-output-string'
+ self.mock_job.output.return_value = "fake-boa-output-string"
boa_settings.REFRESH_JOB_INTERVAL = DEFAULT_REFRESH_JOB_INTERVAL
boa_settings.MAX_JOB_WAITING_TIME = DEFAULT_MAX_JOB_WAITING_TIME
@@ -171,16 +178,30 @@ def tearDown(self):
super().tearDown()
async def test_submit_success(self):
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', side_effect=[self.mock_resp, self.mock_resp]), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \
- mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen",
+ side_effect=[self.mock_resp, self.mock_resp],
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "asyncio.sleep", new_callable=AsyncMock, return_value=None
+ ) as mock_async_sleep, mock.patch(
+ "addons.boa.tasks.send_mail", return_value=None
+ ) as mock_send_mail, mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -215,11 +236,17 @@ async def test_submit_success(self):
mock_handle_boa_error.assert_not_called()
async def test_download_error(self):
- http_404 = HTTPError(self.host, 404, 'Not Found', HTTPMessage(), None)
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', side_effect=http_404), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ http_404 = HTTPError(self.host, 404, "Not Found", HTTPMessage(), None)
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", side_effect=http_404
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -245,12 +272,20 @@ async def test_download_error(self):
)
async def test_login_error(self):
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', side_effect=BoaException()) as mock_login, \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", side_effect=BoaException()
+ ) as mock_login, mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -278,13 +313,23 @@ async def test_login_error(self):
)
async def test_data_set_error(self):
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', side_effect=BoaException()) as mock_get_dataset, \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ side_effect=BoaException(),
+ ) as mock_get_dataset, mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -312,14 +357,25 @@ async def test_data_set_error(self):
)
async def test_submit_error(self):
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', side_effect=BoaException()) as mock_query, \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", side_effect=BoaException()
+ ) as mock_query, mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -349,15 +405,27 @@ async def test_submit_error(self):
async def test_compile_error(self):
self.mock_job.compiler_status = CompilerStatus.ERROR
self.mock_job.exec_status = ExecutionStatus.WAITING
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "asyncio.sleep", new_callable=AsyncMock, return_value=None
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -387,15 +455,27 @@ async def test_compile_error(self):
async def test_execute_error(self):
self.mock_job.compiler_status = CompilerStatus.FINISHED
self.mock_job.exec_status = ExecutionStatus.ERROR
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "asyncio.sleep", new_callable=AsyncMock, return_value=None
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -424,15 +504,27 @@ async def test_execute_error(self):
async def test_output_error_(self):
self.mock_job.output.side_effect = BoaException()
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "asyncio.sleep", new_callable=AsyncMock, return_value=None
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -461,16 +553,28 @@ async def test_output_error_(self):
)
async def test_upload_error_conflict(self):
- http_409 = HTTPError(self.host, 409, 'Conflict', HTTPMessage(), None)
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', side_effect=[self.mock_resp, http_409]), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ http_409 = HTTPError(self.host, 409, "Conflict", HTTPMessage(), None)
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", side_effect=[self.mock_resp, http_409]
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "asyncio.sleep", new_callable=AsyncMock, return_value=None
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -499,16 +603,30 @@ async def test_upload_error_conflict(self):
)
async def test_upload_error_other(self):
- http_503 = HTTPError(self.host, 503, 'Service Unavailable', HTTPMessage(), None)
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', side_effect=[self.mock_resp, http_503]), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ http_503 = HTTPError(
+ self.host, 503, "Service Unavailable", HTTPMessage(), None
+ )
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", side_effect=[self.mock_resp, http_503]
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "asyncio.sleep", new_callable=AsyncMock, return_value=None
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -537,9 +655,14 @@ async def test_upload_error_other(self):
)
async def test_file_too_large_error(self):
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
@@ -568,14 +691,25 @@ async def test_file_too_large_error(self):
async def test_job_timeout_error(self):
boa_settings.REFRESH_JOB_INTERVAL = 1
boa_settings.MAX_JOB_WAITING_TIME = 1
- with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
- mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
- mock.patch('urllib.request.urlopen', return_value=self.mock_resp), \
- mock.patch('boaapi.boa_client.BoaClient.login', return_value=None), \
- mock.patch('boaapi.boa_client.BoaClient.get_dataset', return_value=self.query_dataset), \
- mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \
- mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
- mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
+ with mock.patch(
+ "osf.models.user.OSFUser.objects.get", return_value=self.user
+ ), mock.patch(
+ "osf.models.user.OSFUser.get_or_create_cookie",
+ return_value=self.user_cookie,
+ ), mock.patch(
+ "urllib.request.urlopen", return_value=self.mock_resp
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.login", return_value=None
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.get_dataset",
+ return_value=self.query_dataset,
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.query", return_value=self.mock_job
+ ), mock.patch(
+ "boaapi.boa_client.BoaClient.close", return_value=None
+ ) as mock_close, mock.patch(
+ "addons.boa.tasks.handle_boa_error", return_value=None
+ ) as mock_handle_boa_error:
return_value = await submit_to_boa_async(
self.host,
self.username,
diff --git a/addons/boa/tests/test_views.py b/addons/boa/tests/test_views.py
index 3989386f739..cc06307da0a 100644
--- a/addons/boa/tests/test_views.py
+++ b/addons/boa/tests/test_views.py
@@ -2,7 +2,10 @@
import pytest
from rest_framework import status as http_status
-from addons.base.tests.views import OAuthAddonAuthViewsTestCaseMixin, OAuthAddonConfigViewsTestCaseMixin
+from addons.base.tests.views import (
+ OAuthAddonAuthViewsTestCaseMixin,
+ OAuthAddonConfigViewsTestCaseMixin,
+)
from addons.boa.tests.factories import BOA_HOST, BOA_PASSWORD
from addons.boa.tests.utils import BoaBasicAuthAddonTestCase
from addons.boa.boa_error_code import BoaErrorCode
@@ -15,8 +18,9 @@
pytestmark = pytest.mark.django_db
-class TestAuthViews(BoaBasicAuthAddonTestCase, OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
-
+class TestAuthViews(
+ BoaBasicAuthAddonTestCase, OAuthAddonAuthViewsTestCaseMixin, OsfTestCase
+):
def test_oauth_start(self):
"""Not applicable to non-oauth add-ons."""
pass
@@ -26,12 +30,17 @@ def test_oauth_finish(self):
pass
-class TestConfigViews(BoaBasicAuthAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
-
+class TestConfigViews(
+ BoaBasicAuthAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase
+):
def setUp(self):
super().setUp()
- self.mock_boa_client_login = mock.patch('boaapi.boa_client.BoaClient.login')
- self.mock_boa_client_close = mock.patch('boaapi.boa_client.BoaClient.close')
+ self.mock_boa_client_login = mock.patch(
+ "boaapi.boa_client.BoaClient.login"
+ )
+ self.mock_boa_client_close = mock.patch(
+ "boaapi.boa_client.BoaClient.close"
+ )
self.mock_boa_client_login.start()
self.mock_boa_client_close.start()
@@ -50,204 +59,243 @@ def test_set_config(self):
def test_get_config(self):
"""Lacking coverage for remote computing add-ons and thus replaced by:
- * ``test_get_config_owner_with_external_account()``
- * ``test_get_config_owner_without_external_account()``
- * ``test_get_config_write_contrib_with_external_account()``
- * ``test_get_config_write_contrib_without_external_account()``
- * ``test_get_config_admin_contrib_with_external_account()``
- * ``test_get_config_admin_contrib_without_external_account()``
+ * ``test_get_config_owner_with_external_account()``
+ * ``test_get_config_owner_without_external_account()``
+ * ``test_get_config_write_contrib_with_external_account()``
+ * ``test_get_config_write_contrib_without_external_account()``
+ * ``test_get_config_admin_contrib_with_external_account()``
+ * ``test_get_config_admin_contrib_without_external_account()``
"""
pass
def test_get_config_unauthorized(self):
"""Lacking coverage for remote computing add-ons and thus replaced by:
- * ``test_get_config_read_contrib_with_valid_credentials()``
- * ``test_get_config_read_contrib_without_valid_credentials()``
+ * ``test_get_config_read_contrib_with_valid_credentials()``
+ * ``test_get_config_read_contrib_without_valid_credentials()``
"""
pass
def test_get_config_owner_with_external_account(self):
-
self.node_settings.set_auth(self.external_account, self.user)
serialized = self.Serializer().serialize_settings(
- self.node_settings,
- self.user,
- self.client
+ self.node_settings, self.user, self.client
)
assert self.node_settings.external_account is not None
- assert serialized['validCredentials'] is True
+ assert serialized["validCredentials"] is True
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- assert serialized == res.json['result']
+ assert "result" in res.json
+ assert serialized == res.json["result"]
def test_get_config_owner_without_external_account(self):
-
serialized = self.Serializer().serialize_settings(
- self.node_settings,
- self.user,
- self.client
+ self.node_settings, self.user, self.client
)
assert self.node_settings.external_account is None
- assert serialized['validCredentials'] is False
+ assert serialized["validCredentials"] is False
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- assert serialized == res.json['result']
+ assert "result" in res.json
+ assert serialized == res.json["result"]
def test_get_config_write_contrib_with_external_account(self):
-
user_write = AuthUserFactory()
self.node_settings.set_auth(self.external_account, self.user)
- self.project.add_contributor(user_write, permissions=permissions.WRITE, auth=self.auth, save=True)
- serialized = self.Serializer().serialize_settings(
- self.node_settings,
+ self.project.add_contributor(
user_write,
- self.client
+ permissions=permissions.WRITE,
+ auth=self.auth,
+ save=True,
+ )
+ serialized = self.Serializer().serialize_settings(
+ self.node_settings, user_write, self.client
)
assert self.node_settings.external_account is not None
- assert serialized['validCredentials'] is True
+ assert serialized["validCredentials"] is True
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=user_write.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- assert serialized == res.json['result']
+ assert "result" in res.json
+ assert serialized == res.json["result"]
def test_get_config_write_contrib_without_external_account(self):
-
user_write = AuthUserFactory()
- self.project.add_contributor(user_write, permissions=permissions.WRITE, auth=self.auth, save=True)
- serialized = self.Serializer().serialize_settings(
- self.node_settings,
+ self.project.add_contributor(
user_write,
- self.client
+ permissions=permissions.WRITE,
+ auth=self.auth,
+ save=True,
+ )
+ serialized = self.Serializer().serialize_settings(
+ self.node_settings, user_write, self.client
)
assert self.node_settings.external_account is None
- assert serialized['validCredentials'] is False
+ assert serialized["validCredentials"] is False
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=user_write.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- assert serialized == res.json['result']
+ assert "result" in res.json
+ assert serialized == res.json["result"]
def test_get_config_admin_contrib_with_external_account(self):
-
user_admin = AuthUserFactory()
self.node_settings.set_auth(self.external_account, self.user)
- self.project.add_contributor(user_admin, permissions=permissions.ADMIN, auth=self.auth, save=True)
- serialized = self.Serializer().serialize_settings(
- self.node_settings,
+ self.project.add_contributor(
user_admin,
- self.client
+ permissions=permissions.ADMIN,
+ auth=self.auth,
+ save=True,
+ )
+ serialized = self.Serializer().serialize_settings(
+ self.node_settings, user_admin, self.client
)
assert self.node_settings.external_account is not None
- assert serialized['validCredentials'] is True
+ assert serialized["validCredentials"] is True
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=user_admin.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- assert serialized == res.json['result']
+ assert "result" in res.json
+ assert serialized == res.json["result"]
def test_get_config_admin_contrib_without_external_account(self):
-
user_admin = AuthUserFactory()
- self.project.add_contributor(user_admin, permissions=permissions.ADMIN, auth=self.auth, save=True)
- serialized = self.Serializer().serialize_settings(
- self.node_settings,
+ self.project.add_contributor(
user_admin,
- self.client
+ permissions=permissions.ADMIN,
+ auth=self.auth,
+ save=True,
+ )
+ serialized = self.Serializer().serialize_settings(
+ self.node_settings, user_admin, self.client
)
assert self.node_settings.external_account is None
- assert serialized['validCredentials'] is False
+ assert serialized["validCredentials"] is False
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=user_admin.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
- assert serialized == res.json['result']
+ assert "result" in res.json
+ assert serialized == res.json["result"]
def test_get_config_read_contrib_with_valid_credentials(self):
-
user_read_only = AuthUserFactory()
- self.project.add_contributor(user_read_only, permissions=permissions.READ, auth=self.auth, save=True)
+ self.project.add_contributor(
+ user_read_only,
+ permissions=permissions.READ,
+ auth=self.auth,
+ save=True,
+ )
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
- with mock.patch.object(type(self.Serializer()), 'credentials_are_valid', return_value=True):
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
+ with mock.patch.object(
+ type(self.Serializer()), "credentials_are_valid", return_value=True
+ ):
res = self.app.get(url, auth=user_read_only.auth)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
def test_get_config_read_contrib_without_valid_credentials(self):
-
user_read_only = AuthUserFactory()
- self.project.add_contributor(user_read_only, permissions=permissions.READ, auth=self.auth, save=True)
+ self.project.add_contributor(
+ user_read_only,
+ permissions=permissions.READ,
+ auth=self.auth,
+ save=True,
+ )
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
- with mock.patch.object(type(self.Serializer()), 'credentials_are_valid', return_value=False):
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
+ with mock.patch.object(
+ type(self.Serializer()),
+ "credentials_are_valid",
+ return_value=False,
+ ):
res = self.app.get(url, auth=user_read_only.auth)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
class TestBoaSubmitViews(BoaBasicAuthAddonTestCase, OsfTestCase):
-
def setUp(self):
super().setUp()
- self.folder_name = 'fake_boa_folder'
- self.file_name = 'fake_boa_file.boa'
+ self.folder_name = "fake_boa_folder"
+ self.file_name = "fake_boa_file.boa"
self.file_size = 255
- self.dataset = 'fake_boa_dataset'
- self.download_url = f'{osf_settings.WATERBUTLER_URL}/v1/resources/{self.project._primary_key}/' \
- f'providers/osfstorage/1a2b3c4d5e6f7g8'
- self.upload_url = f'{osf_settings.WATERBUTLER_URL}/v1/resources/{self.project._id}/' \
- f'providers/osfstorage/8g7f6e5d4c3b2a1?kind=file'
- self.download_url_internal = f'{osf_settings.WATERBUTLER_INTERNAL_URL}/v1/resources/' \
- f'{self.project._primary_key}/providers/osfstorage/1a2b3c4d5e6f7g8'
- self.upload_url_internal = f'{osf_settings.WATERBUTLER_INTERNAL_URL}/v1/resources/' \
- f'{self.project._id}/providers/osfstorage/8g7f6e5d4c3b2a1?kind=file'
+ self.dataset = "fake_boa_dataset"
+ self.download_url = (
+ f"{osf_settings.WATERBUTLER_URL}/v1/resources/{self.project._primary_key}/"
+ f"providers/osfstorage/1a2b3c4d5e6f7g8"
+ )
+ self.upload_url = (
+ f"{osf_settings.WATERBUTLER_URL}/v1/resources/{self.project._id}/"
+ f"providers/osfstorage/8g7f6e5d4c3b2a1?kind=file"
+ )
+ self.download_url_internal = (
+ f"{osf_settings.WATERBUTLER_INTERNAL_URL}/v1/resources/"
+ f"{self.project._primary_key}/providers/osfstorage/1a2b3c4d5e6f7g8"
+ )
+ self.upload_url_internal = (
+ f"{osf_settings.WATERBUTLER_INTERNAL_URL}/v1/resources/"
+ f"{self.project._id}/providers/osfstorage/8g7f6e5d4c3b2a1?kind=file"
+ )
self.payload_sub_folder = {
- 'data': {
- 'links': {'download': self.download_url, },
- 'name': self.file_name,
- 'materialized': f'/{self.folder_name}/{self.file_name}',
- 'nodeId': self.project._id,
- 'sizeInt': self.file_size,
+ "data": {
+ "links": {
+ "download": self.download_url,
+ },
+ "name": self.file_name,
+ "materialized": f"/{self.folder_name}/{self.file_name}",
+ "nodeId": self.project._id,
+ "sizeInt": self.file_size,
},
- 'parent': {
- 'links': {'upload': self.upload_url, },
+ "parent": {
+ "links": {
+ "upload": self.upload_url,
+ },
},
- 'dataset': self.dataset,
+ "dataset": self.dataset,
}
self.payload_addon_root = {
- 'data': {
- 'links': {'download': self.download_url, },
- 'name': self.file_name,
- 'materialized': f'/{self.file_name}',
- 'nodeId': self.project._id,
- 'sizeInt': self.file_size,
+ "data": {
+ "links": {
+ "download": self.download_url,
+ },
+ "name": self.file_name,
+ "materialized": f"/{self.file_name}",
+ "nodeId": self.project._id,
+ "sizeInt": self.file_size,
},
- 'parent': {
- 'isAddonRoot': True,
+ "parent": {
+ "isAddonRoot": True,
},
- 'dataset': self.dataset,
+ "dataset": self.dataset,
}
def tearDown(self):
super().tearDown()
def test_boa_submit_job_from_addon_root(self):
- with mock.patch('addons.boa.tasks.submit_to_boa.s', return_value=BoaErrorCode.NO_ERROR) as mock_submit_s:
+ with mock.patch(
+ "addons.boa.tasks.submit_to_boa.s",
+ return_value=BoaErrorCode.NO_ERROR,
+ ) as mock_submit_s:
self.node_settings.set_auth(self.external_account, self.user)
base_url = self.project.osfstorage_region.waterbutler_url
- addon_root_url = waterbutler_api_url_for(self.project._id, 'osfstorage', _internal=True, base_url=base_url)
- upload_url_root = f'{addon_root_url}?kind=file'
- url = self.project.api_url_for('boa_submit_job')
- res = self.app.post(url, json=self.payload_addon_root, auth=self.user.auth)
+ addon_root_url = waterbutler_api_url_for(
+ self.project._id,
+ "osfstorage",
+ _internal=True,
+ base_url=base_url,
+ )
+ upload_url_root = f"{addon_root_url}?kind=file"
+ url = self.project.api_url_for("boa_submit_job")
+ res = self.app.post(
+ url, json=self.payload_addon_root, auth=self.user.auth
+ )
assert res.status_code == http_status.HTTP_200_OK
mock_submit_s.assert_called_with(
BOA_HOST,
@@ -258,16 +306,21 @@ def test_boa_submit_job_from_addon_root(self):
self.dataset,
self.file_name,
self.file_size,
- f'/{self.file_name}',
+ f"/{self.file_name}",
self.download_url_internal,
upload_url_root,
)
def test_boa_submit_job_from_sub_folder(self):
- with mock.patch('addons.boa.tasks.submit_to_boa.s', return_value=BoaErrorCode.NO_ERROR) as mock_submit_s:
+ with mock.patch(
+ "addons.boa.tasks.submit_to_boa.s",
+ return_value=BoaErrorCode.NO_ERROR,
+ ) as mock_submit_s:
self.node_settings.set_auth(self.external_account, self.user)
- url = self.project.api_url_for('boa_submit_job')
- res = self.app.post(url, json=self.payload_sub_folder, auth=self.user.auth)
+ url = self.project.api_url_for("boa_submit_job")
+ res = self.app.post(
+ url, json=self.payload_sub_folder, auth=self.user.auth
+ )
assert res.status_code == http_status.HTTP_200_OK
mock_submit_s.assert_called_with(
BOA_HOST,
@@ -278,18 +331,28 @@ def test_boa_submit_job_from_sub_folder(self):
self.dataset,
self.file_name,
self.file_size,
- f'/{self.folder_name}/{self.file_name}',
+ f"/{self.folder_name}/{self.file_name}",
self.download_url_internal,
self.upload_url_internal,
)
def test_boa_submit_job_admin_contrib(self):
- with mock.patch('addons.boa.tasks.submit_to_boa.s', return_value=BoaErrorCode.NO_ERROR) as mock_submit_s:
+ with mock.patch(
+ "addons.boa.tasks.submit_to_boa.s",
+ return_value=BoaErrorCode.NO_ERROR,
+ ) as mock_submit_s:
self.node_settings.set_auth(self.external_account, self.user)
user_admin = AuthUserFactory()
- self.project.add_contributor(user_admin, permissions=permissions.ADMIN, auth=self.auth, save=True)
- url = self.project.api_url_for('boa_submit_job')
- res = self.app.post(url, json=self.payload_sub_folder, auth=user_admin.auth)
+ self.project.add_contributor(
+ user_admin,
+ permissions=permissions.ADMIN,
+ auth=self.auth,
+ save=True,
+ )
+ url = self.project.api_url_for("boa_submit_job")
+ res = self.app.post(
+ url, json=self.payload_sub_folder, auth=user_admin.auth
+ )
assert res.status_code == http_status.HTTP_200_OK
mock_submit_s.assert_called_with(
BOA_HOST,
@@ -300,18 +363,28 @@ def test_boa_submit_job_admin_contrib(self):
self.dataset,
self.file_name,
self.file_size,
- f'/{self.folder_name}/{self.file_name}',
+ f"/{self.folder_name}/{self.file_name}",
self.download_url_internal,
self.upload_url_internal,
)
def test_boa_submit_job_write_contrib(self):
- with mock.patch('addons.boa.tasks.submit_to_boa.s', return_value=BoaErrorCode.NO_ERROR) as mock_submit_s:
+ with mock.patch(
+ "addons.boa.tasks.submit_to_boa.s",
+ return_value=BoaErrorCode.NO_ERROR,
+ ) as mock_submit_s:
self.node_settings.set_auth(self.external_account, self.user)
user_write = AuthUserFactory()
- self.project.add_contributor(user_write, permissions=permissions.WRITE, auth=self.auth, save=True)
- url = self.project.api_url_for('boa_submit_job')
- res = self.app.post(url, json=self.payload_sub_folder, auth=user_write.auth)
+ self.project.add_contributor(
+ user_write,
+ permissions=permissions.WRITE,
+ auth=self.auth,
+ save=True,
+ )
+ url = self.project.api_url_for("boa_submit_job")
+ res = self.app.post(
+ url, json=self.payload_sub_folder, auth=user_write.auth
+ )
assert res.status_code == http_status.HTTP_200_OK
mock_submit_s.assert_called_with(
BOA_HOST,
@@ -322,17 +395,27 @@ def test_boa_submit_job_write_contrib(self):
self.dataset,
self.file_name,
self.file_size,
- f'/{self.folder_name}/{self.file_name}',
+ f"/{self.folder_name}/{self.file_name}",
self.download_url_internal,
self.upload_url_internal,
)
def test_boa_submit_job_read_contrib(self):
- with mock.patch('addons.boa.tasks.submit_to_boa.s', return_value=BoaErrorCode.NO_ERROR) as mock_submit_s:
+ with mock.patch(
+ "addons.boa.tasks.submit_to_boa.s",
+ return_value=BoaErrorCode.NO_ERROR,
+ ) as mock_submit_s:
self.node_settings.set_auth(self.external_account, self.user)
user_read_only = AuthUserFactory()
- self.project.add_contributor(user_read_only, permissions=permissions.READ, auth=self.auth, save=True)
- url = self.project.api_url_for('boa_submit_job')
- res = self.app.post(url, json=self.payload_sub_folder, auth=user_read_only.auth)
+ self.project.add_contributor(
+ user_read_only,
+ permissions=permissions.READ,
+ auth=self.auth,
+ save=True,
+ )
+ url = self.project.api_url_for("boa_submit_job")
+ res = self.app.post(
+ url, json=self.payload_sub_folder, auth=user_read_only.auth
+ )
assert res.status_code == http_status.HTTP_403_FORBIDDEN
mock_submit_s.assert_not_called()
diff --git a/addons/boa/tests/utils.py b/addons/boa/tests/utils.py
index c5ad1f474b0..4e1b3d9d82d 100644
--- a/addons/boa/tests/utils.py
+++ b/addons/boa/tests/utils.py
@@ -1,16 +1,19 @@
from addons.base.tests.base import AddonTestCase, OAuthAddonTestCaseMixin
from addons.boa.models import BoaProvider, BoaSerializer, NodeSettings
-from addons.boa.tests.factories import BoaAccountFactory, BoaNodeSettingsFactory, BoaUserSettingsFactory
+from addons.boa.tests.factories import (
+ BoaAccountFactory,
+ BoaNodeSettingsFactory,
+ BoaUserSettingsFactory,
+)
class BoaAddonTestCaseBaseMixin:
-
- short_name = 'boa'
- full_name = 'Boa'
+ short_name = "boa"
+ full_name = "Boa"
client = None # Non-oauth add-on does not have client
folder = None # Remote computing add-on does not have folder
- addon_short_name = 'boa'
- ADDON_SHORT_NAME = 'boa'
+ addon_short_name = "boa"
+ ADDON_SHORT_NAME = "boa"
Provider = BoaProvider
Serializer = BoaSerializer
ExternalAccountFactory = BoaAccountFactory
@@ -19,8 +22,9 @@ class BoaAddonTestCaseBaseMixin:
UserSettingsFactory = BoaUserSettingsFactory
-class BoaBasicAuthAddonTestCase(BoaAddonTestCaseBaseMixin, OAuthAddonTestCaseMixin, AddonTestCase):
-
+class BoaBasicAuthAddonTestCase(
+ BoaAddonTestCaseBaseMixin, OAuthAddonTestCaseMixin, AddonTestCase
+):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.auth = None
diff --git a/addons/boa/views.py b/addons/boa/views.py
index 05cf37729c5..86ba847cda3 100644
--- a/addons/boa/views.py
+++ b/addons/boa/views.py
@@ -20,8 +20,8 @@
logger = logging.getLogger(__name__)
-SHORT_NAME = 'boa'
-FULL_NAME = 'Boa'
+SHORT_NAME = "boa"
+FULL_NAME = "Boa"
boa_account_list = generic_views.account_list(SHORT_NAME, BoaSerializer)
boa_import_auth = generic_views.import_auth(SHORT_NAME, BoaSerializer)
@@ -35,22 +35,29 @@ def boa_add_user_account(auth, **kwargs):
This view expects `username` and `password` fields in the JSON body of the request.
"""
- username = request.json.get('username')
- password = request.json.get('password')
+ username = request.json.get("username")
+ password = request.json.get("password")
try:
boa_client = BoaClient(endpoint=BOA_API_ENDPOINT)
boa_client.login(username, password)
boa_client.close()
except BoaException:
- return {'message': 'Boa Login failed.'}, http_status.HTTP_401_UNAUTHORIZED
-
- provider = BoaProvider(account=None, host=BOA_API_ENDPOINT, username=username, password=password)
+ return {
+ "message": "Boa Login failed."
+ }, http_status.HTTP_401_UNAUTHORIZED
+
+ provider = BoaProvider(
+ account=None,
+ host=BOA_API_ENDPOINT,
+ username=username,
+ password=password,
+ )
try:
provider.account.save()
except ValidationError:
provider.account = ExternalAccount.objects.get(
provider=provider.short_name,
- provider_id=f'{BOA_API_ENDPOINT}:{username}'.lower()
+ provider_id=f"{BOA_API_ENDPOINT}:{username}".lower(),
)
if provider.account.oauth_key != password:
provider.account.oauth_key = password
@@ -62,52 +69,68 @@ def boa_add_user_account(auth, **kwargs):
if not user.external_accounts.filter(id=provider.account.id).exists():
user.external_accounts.add(provider.account)
- user.get_or_add_addon('boa', auth=auth)
+ user.get_or_add_addon("boa", auth=auth)
user.save()
return {}
@must_be_logged_in
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "node")
@must_have_permission(permissions.WRITE)
def boa_submit_job(node_addon, **kwargs):
-
req_params = request.json
# Boa addon configuration
provider = node_addon.external_account.provider_id
- parts = provider.rsplit(':', 1)
+ parts = provider.rsplit(":", 1)
host, username = parts[0], parts[1]
password = node_addon.external_account.oauth_key
# User and project
- user = kwargs['auth'].user
+ user = kwargs["auth"].user
user_guid = user._id
- project_guid = req_params['data']['nodeId']
+ project_guid = req_params["data"]["nodeId"]
# Query file
- file_name = req_params['data']['name']
- file_size = req_params['data']['sizeInt']
- file_full_path = req_params['data']['materialized']
- file_download_url = req_params['data']['links']['download'].replace(osf_settings.WATERBUTLER_URL,
- osf_settings.WATERBUTLER_INTERNAL_URL)
+ file_name = req_params["data"]["name"]
+ file_size = req_params["data"]["sizeInt"]
+ file_full_path = req_params["data"]["materialized"]
+ file_download_url = req_params["data"]["links"]["download"].replace(
+ osf_settings.WATERBUTLER_URL, osf_settings.WATERBUTLER_INTERNAL_URL
+ )
# Parent folder: project root is different from sub-folder
- is_addon_root = req_params['parent'].get('isAddonRoot', False)
+ is_addon_root = req_params["parent"].get("isAddonRoot", False)
if is_addon_root:
project_node = AbstractNode.load(project_guid)
base_url = project_node.osfstorage_region.waterbutler_url
- parent_wb_url = waterbutler_api_url_for(project_guid, 'osfstorage', _internal=True, base_url=base_url)
- output_upload_url = f'{parent_wb_url}?kind=file'
+ parent_wb_url = waterbutler_api_url_for(
+ project_guid, "osfstorage", _internal=True, base_url=base_url
+ )
+ output_upload_url = f"{parent_wb_url}?kind=file"
else:
- output_upload_url = req_params['parent']['links']['upload'].replace(osf_settings.WATERBUTLER_URL,
- osf_settings.WATERBUTLER_INTERNAL_URL)
+ output_upload_url = req_params["parent"]["links"]["upload"].replace(
+ osf_settings.WATERBUTLER_URL, osf_settings.WATERBUTLER_INTERNAL_URL
+ )
# Boa dataset
- dataset = req_params['dataset']
+ dataset = req_params["dataset"]
# Send to task ``submit_to_boa``
- enqueue_task(submit_to_boa.s(host, username, password, user_guid, project_guid, dataset,
- file_name, file_size, file_full_path, file_download_url, output_upload_url))
+ enqueue_task(
+ submit_to_boa.s(
+ host,
+ username,
+ password,
+ user_guid,
+ project_guid,
+ dataset,
+ file_name,
+ file_size,
+ file_full_path,
+ file_download_url,
+ output_upload_url,
+ )
+ )
return {}
diff --git a/addons/box/apps.py b/addons/box/apps.py
index d7563b94fca..8fe8b25e1e3 100644
--- a/addons/box/apps.py
+++ b/addons/box/apps.py
@@ -2,18 +2,18 @@
from addons.box.settings import MAX_UPLOAD_SIZE
-box_root_folder = generic_root_folder('box')
+box_root_folder = generic_root_folder("box")
-class BoxAddonAppConfig(BaseAddonAppConfig):
+class BoxAddonAppConfig(BaseAddonAppConfig):
default = True
- name = 'addons.box'
- label = 'addons_box'
- full_name = 'Box'
- short_name = 'box'
- owners = ['user', 'node']
- configs = ['accounts', 'node']
- categories = ['storage']
+ name = "addons.box"
+ label = "addons_box"
+ full_name = "Box"
+ short_name = "box"
+ owners = ["user", "node"]
+ configs = ["accounts", "node"]
+ categories = ["storage"]
has_hgrid_files = True
max_file_size = MAX_UPLOAD_SIZE
@@ -21,21 +21,26 @@ class BoxAddonAppConfig(BaseAddonAppConfig):
def get_hgrid_data(self):
return box_root_folder
- FOLDER_SELECTED = 'box_folder_selected'
- NODE_AUTHORIZED = 'box_node_authorized'
- NODE_DEAUTHORIZED = 'box_node_deauthorized'
+ FOLDER_SELECTED = "box_folder_selected"
+ NODE_AUTHORIZED = "box_node_authorized"
+ NODE_DEAUTHORIZED = "box_node_deauthorized"
- actions = (FOLDER_SELECTED, NODE_AUTHORIZED, NODE_DEAUTHORIZED, )
+ actions = (
+ FOLDER_SELECTED,
+ NODE_AUTHORIZED,
+ NODE_DEAUTHORIZED,
+ )
@property
def routes(self):
from . import routes
+
return [routes.api_routes]
@property
def user_settings(self):
- return self.get_model('UserSettings')
+ return self.get_model("UserSettings")
@property
def node_settings(self):
- return self.get_model('NodeSettings')
+ return self.get_model("NodeSettings")
diff --git a/addons/box/models.py b/addons/box/models.py
index 78002b122df..c2c22067443 100644
--- a/addons/box/models.py
+++ b/addons/box/models.py
@@ -3,8 +3,11 @@
import os
import requests
-from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
- BaseStorageAddon)
+from addons.base.models import (
+ BaseOAuthNodeSettings,
+ BaseOAuthUserSettings,
+ BaseStorageAddon,
+)
from boxsdk import Client, OAuth2
from boxsdk.exception import BoxAPIException
from django.db import models
@@ -24,7 +27,7 @@
class BoxFileNode(BaseFileNode):
- _provider = 'box'
+ _provider = "box"
class BoxFolder(BoxFileNode, Folder):
@@ -35,14 +38,14 @@ class BoxFile(BoxFileNode, File):
@property
def _hashes(self):
try:
- return {'sha1': self._history[-1]['extra']['hashes']['sha1']}
+ return {"sha1": self._history[-1]["extra"]["hashes"]["sha1"]}
except (IndexError, KeyError):
return None
class Provider(ExternalProvider):
- name = 'Box'
- short_name = 'box'
+ name = "Box"
+ short_name = "box"
client_id = settings.BOX_KEY
client_secret = settings.BOX_SECRET
@@ -52,32 +55,33 @@ class Provider(ExternalProvider):
auto_refresh_url = callback_url
refresh_time = settings.REFRESH_TIME
expiry_time = settings.EXPIRY_TIME
- default_scopes = ['root_readwrite']
+ default_scopes = ["root_readwrite"]
def handle_callback(self, response):
"""View called when the Oauth flow is completed. Adds a new UserSettings
record to the user and saves the user's access token and account info.
"""
- client = Client(OAuth2(
- access_token=response['access_token'],
- refresh_token=response['refresh_token'],
- client_id=settings.BOX_KEY,
- client_secret=settings.BOX_SECRET,
- ))
+ client = Client(
+ OAuth2(
+ access_token=response["access_token"],
+ refresh_token=response["refresh_token"],
+ client_id=settings.BOX_KEY,
+ client_secret=settings.BOX_SECRET,
+ )
+ )
about = client.user().get()
return {
- 'provider_id': about['id'],
- 'display_name': about['name'],
- 'profile_url': f'https://app.box.com/profile/{about["id"]}'
+ "provider_id": about["id"],
+ "display_name": about["name"],
+ "profile_url": f'https://app.box.com/profile/{about["id"]}',
}
class UserSettings(BaseOAuthUserSettings):
- """Stores user-specific box information
- """
+ """Stores user-specific box information"""
oauth_provider = Provider
serializer = BoxSerializer
@@ -86,13 +90,13 @@ def revoke_remote_oauth_access(self, external_account):
try:
# TODO: write client for box, stop using third-party lib
requests.request(
- 'POST',
+ "POST",
settings.BOX_OAUTH_REVOKE_ENDPOINT,
params={
- 'client_id': settings.BOX_KEY,
- 'client_secret': settings.BOX_SECRET,
- 'token': external_account.oauth_key,
- }
+ "client_id": settings.BOX_KEY,
+ "client_secret": settings.BOX_SECRET,
+ "token": external_account.oauth_key,
+ },
)
except requests.HTTPError:
pass
@@ -105,7 +109,9 @@ class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
folder_id = models.TextField(null=True, blank=True)
folder_name = models.TextField(null=True, blank=True)
folder_path = models.TextField(null=True, blank=True)
- user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
+ user_settings = models.ForeignKey(
+ UserSettings, null=True, blank=True, on_delete=models.CASCADE
+ )
_api = None
@@ -118,31 +124,38 @@ def api(self):
@property
def display_name(self):
- return f'{self.config.full_name}: {self.folder_id}'
+ return f"{self.config.full_name}: {self.folder_id}"
def fetch_full_folder_path(self):
return self.folder_path
def get_folders(self, **kwargs):
- folder_id = kwargs.get('folder_id')
+ folder_id = kwargs.get("folder_id")
if folder_id is None:
- return [{
- 'id': '0',
- 'path': '/',
- 'addon': 'box',
- 'kind': 'folder',
- 'name': '/ (Full Box)',
- 'urls': {
- # 'folders': node.api_url_for('box_folder_list', folderId=0),
- 'folders': api_v2_url(f'nodes/{self.owner._id}/addons/box/folders/',
- params={'id': '0'}
- )
+ return [
+ {
+ "id": "0",
+ "path": "/",
+ "addon": "box",
+ "kind": "folder",
+ "name": "/ (Full Box)",
+ "urls": {
+ # 'folders': node.api_url_for('box_folder_list', folderId=0),
+ "folders": api_v2_url(
+ f"nodes/{self.owner._id}/addons/box/folders/",
+ params={"id": "0"},
+ )
+ },
}
- }]
+ ]
try:
Provider(self.external_account).refresh_oauth_key()
- oauth = OAuth2(client_id=settings.BOX_KEY, client_secret=settings.BOX_SECRET, access_token=ensure_str(self.external_account.oauth_key))
+ oauth = OAuth2(
+ client_id=settings.BOX_KEY,
+ client_secret=settings.BOX_SECRET,
+ access_token=ensure_str(self.external_account.oauth_key),
+ )
client = Client(oauth)
except BoxAPIException:
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
@@ -154,34 +167,35 @@ def get_folders(self, **kwargs):
except MaxRetryError:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
- folder_path = '/'.join(
- [
- x['name']
- for x in metadata['path_collection']['entries']
- ] + [metadata['name']]
+ folder_path = "/".join(
+ [x["name"] for x in metadata["path_collection"]["entries"]]
+ + [metadata["name"]]
)
return [
{
- 'addon': 'box',
- 'kind': 'folder',
- 'id': item['id'],
- 'name': item['name'],
- 'path': os.path.join(folder_path, item['name']).replace('All Files', ''),
- 'urls': {
- 'folders': api_v2_url(f'nodes/{self.owner._id}/addons/box/folders/',
- params={'id': item['id']}
+ "addon": "box",
+ "kind": "folder",
+ "id": item["id"],
+ "name": item["name"],
+ "path": os.path.join(folder_path, item["name"]).replace(
+ "All Files", ""
+ ),
+ "urls": {
+ "folders": api_v2_url(
+ f"nodes/{self.owner._id}/addons/box/folders/",
+ params={"id": item["id"]},
)
- }
+ },
}
- for item in metadata['item_collection']['entries']
- if item['type'] == 'folder'
+ for item in metadata["item_collection"]["entries"]
+ if item["type"] == "folder"
]
def set_folder(self, folder_id, auth):
self.folder_id = str(folder_id)
self.folder_name, self.folder_path = self._folder_data(folder_id)
- self.nodelogger.log(action='folder_selected', save=True)
+ self.nodelogger.log(action="folder_selected", save=True)
def _folder_data(self, folder_id):
# Split out from set_folder for ease of testing, due to
@@ -191,17 +205,30 @@ def _folder_data(self, folder_id):
except InvalidGrantError:
raise exceptions.InvalidAuthError()
try:
- oauth = OAuth2(client_id=settings.BOX_KEY, client_secret=settings.BOX_SECRET, access_token=ensure_str(self.external_account.oauth_key))
+ oauth = OAuth2(
+ client_id=settings.BOX_KEY,
+ client_secret=settings.BOX_SECRET,
+ access_token=ensure_str(self.external_account.oauth_key),
+ )
client = Client(oauth)
folder_data = client.folder(self.folder_id).get()
except BoxAPIException:
raise exceptions.InvalidFolderError()
- folder_name = folder_data['name'].replace('All Files', '') or '/ (Full Box)'
- folder_path = '/'.join(
- [x['name'] for x in folder_data['path_collection']['entries'] if x['name']] +
- [folder_data['name']]
- ).replace('All Files', '') or '/'
+ folder_name = (
+ folder_data["name"].replace("All Files", "") or "/ (Full Box)"
+ )
+ folder_path = (
+ "/".join(
+ [
+ x["name"]
+ for x in folder_data["path_collection"]["entries"]
+ if x["name"]
+ ]
+ + [folder_data["name"]]
+ ).replace("All Files", "")
+ or "/"
+ )
return folder_name, folder_path
@@ -216,44 +243,50 @@ def deauthorize(self, auth=None, add_log=True):
self.clear_settings()
if add_log:
- extra = {'folder_id': folder_id}
- self.nodelogger.log(action='node_deauthorized', extra=extra, save=True)
+ extra = {"folder_id": folder_id}
+ self.nodelogger.log(
+ action="node_deauthorized", extra=extra, save=True
+ )
self.clear_auth()
def serialize_waterbutler_credentials(self):
if not self.has_auth:
- raise exceptions.AddonError('Addon is not authorized')
+ raise exceptions.AddonError("Addon is not authorized")
try:
Provider(self.external_account).refresh_oauth_key()
- return {'token': self.external_account.oauth_key}
+ return {"token": self.external_account.oauth_key}
except BoxAPIException as error:
- raise HTTPError(error.status_code, data={'message_long': error.message})
+ raise HTTPError(
+ error.status_code, data={"message_long": error.message}
+ )
def serialize_waterbutler_settings(self):
if self.folder_id is None:
- raise exceptions.AddonError('Folder is not configured')
- return {'folder': self.folder_id}
+ raise exceptions.AddonError("Folder is not configured")
+ return {"folder": self.folder_id}
def create_waterbutler_log(self, auth, action, metadata):
self.owner.add_log(
- f'box_{action}',
+ f"box_{action}",
auth=auth,
params={
- 'path': metadata['materialized'],
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
- 'folder': self.folder_id,
- 'urls': {
- 'view': self.owner.web_url_for('addon_view_or_download_file',
- provider='box',
- action='view',
- path=metadata['path']
+ "path": metadata["materialized"],
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
+ "folder": self.folder_id,
+ "urls": {
+ "view": self.owner.web_url_for(
+ "addon_view_or_download_file",
+ provider="box",
+ action="view",
+ path=metadata["path"],
),
- 'download': self.owner.web_url_for('addon_view_or_download_file',
- provider='box',
- action='download',
- path=metadata['path']
+ "download": self.owner.web_url_for(
+ "addon_view_or_download_file",
+ provider="box",
+ action="download",
+ path=metadata["path"],
),
},
},
diff --git a/addons/box/routes.py b/addons/box/routes.py
index b29d650902d..a5ed8947c55 100644
--- a/addons/box/routes.py
+++ b/addons/box/routes.py
@@ -1,64 +1,65 @@
"""Box addon routes."""
+
from framework.routing import Rule, json_renderer
from . import views
api_routes = {
- 'rules': [
+ "rules": [
Rule(
[
- '/settings/box/accounts/',
+ "/settings/box/accounts/",
],
- 'get',
+ "get",
views.box_account_list,
json_renderer,
),
Rule(
[
- '/project//box/settings/',
- '/project//node//box/settings/'
+ "/project//box/settings/",
+ "/project//node//box/settings/",
],
- 'get',
+ "get",
views.box_get_config,
json_renderer,
),
Rule(
[
- '/project//box/settings/',
- '/project//node//box/settings/'
+ "/project//box/settings/",
+ "/project//node//box/settings/",
],
- 'put',
+ "put",
views.box_set_config,
json_renderer,
),
Rule(
[
- '/project//box/user_auth/',
- '/project//node//box/user_auth/'
+ "/project//box/user_auth/",
+ "/project//node//box/user_auth/",
],
- 'put',
+ "put",
views.box_import_auth,
json_renderer,
),
Rule(
[
- '/project//box/user_auth/',
- '/project//node//box/user_auth/'
+ "/project//box/user_auth/",
+ "/project//node//box/user_auth/",
],
- 'delete',
+ "delete",
views.box_deauthorize_node,
json_renderer,
),
Rule(
[
- '/project//box/folders/',
- '/project//node//box/folders/',
+ "/project//box/folders/",
+ "/project//node//box/folders/",
],
- 'get',
+ "get",
views.box_folder_list,
json_renderer,
),
],
- 'prefix': '/api/v1'
+ "prefix": "/api/v1",
}
diff --git a/addons/box/serializer.py b/addons/box/serializer.py
index 23ec5d87cba..014ee10ab73 100644
--- a/addons/box/serializer.py
+++ b/addons/box/serializer.py
@@ -6,18 +6,23 @@
from boxsdk import Client, OAuth2
from boxsdk.exception import BoxAPIException
-class BoxSerializer(StorageAddonSerializer):
- addon_short_name = 'box'
+class BoxSerializer(StorageAddonSerializer):
+ addon_short_name = "box"
def credentials_are_valid(self, user_settings, client):
from addons.box.models import Provider as Box # Avoid circular import
+
if self.node_settings.has_auth:
if Box(self.node_settings.external_account).refresh_oauth_key():
return True
if user_settings:
- oauth = OAuth2(client_id=settings.BOX_KEY, client_secret=settings.BOX_SECRET, access_token=user_settings.external_accounts[0].oauth_key)
+ oauth = OAuth2(
+ client_id=settings.BOX_KEY,
+ client_secret=settings.BOX_SECRET,
+ access_token=user_settings.external_accounts[0].oauth_key,
+ )
client = client or Client(oauth)
try:
client.user()
@@ -28,8 +33,10 @@ def credentials_are_valid(self, user_settings, client):
def serialized_folder(self, node_settings):
path = node_settings.fetch_full_folder_path()
return {
- 'path': path,
- 'name': path.replace('All Files', '', 1) if path != '/' else '/ (Full Box)'
+ "path": path,
+ "name": path.replace("All Files", "", 1)
+ if path != "/"
+ else "/ (Full Box)",
}
@property
@@ -37,12 +44,11 @@ def addon_serialized_urls(self):
node = self.node_settings.owner
return {
- 'auth': api_url_for('oauth_connect',
- service_name='box'),
- 'importAuth': node.api_url_for('box_import_auth'),
- 'files': node.web_url_for('collect_file_trees'),
- 'folders': node.api_url_for('box_folder_list'),
- 'config': node.api_url_for('box_set_config'),
- 'deauthorize': node.api_url_for('box_deauthorize_node'),
- 'accounts': node.api_url_for('box_account_list'),
+ "auth": api_url_for("oauth_connect", service_name="box"),
+ "importAuth": node.api_url_for("box_import_auth"),
+ "files": node.web_url_for("collect_file_trees"),
+ "folders": node.api_url_for("box_folder_list"),
+ "config": node.api_url_for("box_set_config"),
+ "deauthorize": node.api_url_for("box_deauthorize_node"),
+ "accounts": node.api_url_for("box_account_list"),
}
diff --git a/addons/box/settings/__init__.py b/addons/box/settings/__init__.py
index eb5d40c3725..8ef6b3f33e5 100644
--- a/addons/box/settings/__init__.py
+++ b/addons/box/settings/__init__.py
@@ -6,4 +6,4 @@
try:
from .local import * # noqa
except ImportError:
- logger.warning('No local.py settings file found')
+ logger.warning("No local.py settings file found")
diff --git a/addons/box/settings/defaults.py b/addons/box/settings/defaults.py
index 65c127e3f0d..d35f6c49e44 100644
--- a/addons/box/settings/defaults.py
+++ b/addons/box/settings/defaults.py
@@ -6,9 +6,9 @@
EXPIRY_TIME = 60 * 60 * 24 * 60 # 60 days
REFRESH_TIME = 5 * 60 # 5 minutes
-BOX_OAUTH_TOKEN_ENDPOINT = 'https://www.box.com/api/oauth2/token'
-BOX_OAUTH_AUTH_ENDPOINT = 'https://www.box.com/api/oauth2/authorize'
-BOX_OAUTH_REVOKE_ENDPOINT = 'https://api.box.com/oauth2/revoke'
+BOX_OAUTH_TOKEN_ENDPOINT = "https://www.box.com/api/oauth2/token"
+BOX_OAUTH_AUTH_ENDPOINT = "https://www.box.com/api/oauth2/authorize"
+BOX_OAUTH_REVOKE_ENDPOINT = "https://api.box.com/oauth2/revoke"
# Max file size permitted by frontend in megabytes
MAX_UPLOAD_SIZE = 250
diff --git a/addons/box/settings/local-dist.py b/addons/box/settings/local-dist.py
index 38d245c2df6..8fa9dbff9cb 100644
--- a/addons/box/settings/local-dist.py
+++ b/addons/box/settings/local-dist.py
@@ -1,6 +1,7 @@
"""Example Box local settings file. Copy this file to local.py and change
these settings.
"""
+
# Get an app key and secret at https://www.box.com/developers/apps
BOX_KEY = None
BOX_SECRET = None
diff --git a/addons/box/tests/factories.py b/addons/box/tests/factories.py
index 17b1898e49d..ba4684638f9 100644
--- a/addons/box/tests/factories.py
+++ b/addons/box/tests/factories.py
@@ -1,21 +1,25 @@
"""Factory boy factories for the Box addon."""
-from datetime import datetime
+
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from factory import SubFactory, Sequence
from factory.django import DjangoModelFactory
-from osf_tests.factories import UserFactory, ProjectFactory, ExternalAccountFactory
+from osf_tests.factories import (
+ UserFactory,
+ ProjectFactory,
+ ExternalAccountFactory,
+)
from addons.box.models import NodeSettings
from addons.box.models import UserSettings
class BoxAccountFactory(ExternalAccountFactory):
- provider = 'box'
- provider_id = Sequence(lambda n: f'id-{n}')
- oauth_key = Sequence(lambda n: f'key-{n}')
+ provider = "box"
+ provider_id = Sequence(lambda n: f"id-{n}")
+ oauth_key = Sequence(lambda n: f"key-{n}")
expires_at = timezone.now() + relativedelta(seconds=3600)
diff --git a/addons/box/tests/test_client.py b/addons/box/tests/test_client.py
index e6051137ed5..47301b1e4eb 100644
--- a/addons/box/tests/test_client.py
+++ b/addons/box/tests/test_client.py
@@ -7,19 +7,18 @@
pytestmark = pytest.mark.django_db
-class TestCore(unittest.TestCase):
+class TestCore(unittest.TestCase):
def setUp(self):
-
super().setUp()
self.user = UserFactory()
- self.user.add_addon('box')
+ self.user.add_addon("box")
self.user.save()
- self.settings = self.user.get_addon('box')
+ self.settings = self.user.get_addon("box")
self.settings.save()
def test_get_addon_returns_box_user_settings(self):
- result = self.user.get_addon('box')
+ result = self.user.get_addon("box")
assert isinstance(result, UserSettings)
diff --git a/addons/box/tests/test_models.py b/addons/box/tests/test_models.py
index 6829b754d23..6ef0ba44ad9 100644
--- a/addons/box/tests/test_models.py
+++ b/addons/box/tests/test_models.py
@@ -12,9 +12,11 @@
pytestmark = pytest.mark.django_db
-class TestBoxNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
- full_name = 'Box'
- short_name = 'box'
+class TestBoxNodeSettings(
+ OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase
+):
+ full_name = "Box"
+ short_name = "box"
ExternalAccountFactory = factories.BoxAccountFactory
NodeSettingsClass = NodeSettings
@@ -23,9 +25,7 @@ class TestBoxNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCas
def setUp(self):
self.mock_data = mock.patch.object(
- NodeSettings,
- '_folder_data',
- return_value=('12235', '/Foo')
+ NodeSettings, "_folder_data", return_value=("12235", "/Foo")
)
self.mock_data.start()
super().setUp()
@@ -35,22 +35,29 @@ def tearDown(self):
super().tearDown()
def test_folder_defaults_to_none(self):
- node_settings = NodeSettings(user_settings=self.user_settings, owner=factories.ProjectFactory())
+ node_settings = NodeSettings(
+ user_settings=self.user_settings, owner=factories.ProjectFactory()
+ )
node_settings.save()
assert node_settings.folder_id is None
- @mock.patch('addons.box.models.Provider.refresh_oauth_key')
+ @mock.patch("addons.box.models.Provider.refresh_oauth_key")
def test_serialize_credentials(self, mock_refresh):
mock_refresh.return_value = True
super().test_serialize_credentials()
- @mock.patch('addons.box.models.UserSettings.revoke_remote_oauth_access', mock.PropertyMock())
+ @mock.patch(
+ "addons.box.models.UserSettings.revoke_remote_oauth_access",
+ mock.PropertyMock(),
+ )
def test_complete_has_auth_not_verified(self):
super().test_complete_has_auth_not_verified()
-class TestBoxUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
- full_name = 'Box'
- short_name = 'box'
+class TestBoxUserSettings(
+ OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase
+):
+ full_name = "Box"
+ short_name = "box"
ExternalAccountFactory = factories.BoxAccountFactory
diff --git a/addons/box/tests/test_serializer.py b/addons/box/tests/test_serializer.py
index e32adec4faf..fd8ab53d34e 100644
--- a/addons/box/tests/test_serializer.py
+++ b/addons/box/tests/test_serializer.py
@@ -1,4 +1,5 @@
"""Serializer tests for the Box addon."""
+
from unittest import mock
import pytest
@@ -11,9 +12,9 @@
mock_client = MockBox()
pytestmark = pytest.mark.django_db
-class TestBoxSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
- addon_short_name = 'box'
+class TestBoxSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
+ addon_short_name = "box"
Serializer = BoxSerializer
ExternalAccountFactory = BoxAccountFactory
@@ -21,9 +22,7 @@ class TestBoxSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
def setUp(self):
self.mock_valid = mock.patch.object(
- BoxSerializer,
- 'credentials_are_valid',
- return_value=True
+ BoxSerializer, "credentials_are_valid", return_value=True
)
self.mock_valid.start()
super().setUp()
diff --git a/addons/box/tests/test_views.py b/addons/box/tests/test_views.py
index f8da949c5bf..125a876462e 100644
--- a/addons/box/tests/test_views.py
+++ b/addons/box/tests/test_views.py
@@ -1,4 +1,5 @@
"""Views tests for the Box addon."""
+
from django.utils import timezone
from rest_framework import status as http_status
from unittest import mock
@@ -18,16 +19,22 @@
BoxAddonTestCase,
MockBox,
patch_client,
- mock_responses
+ mock_responses,
)
mock_client = MockBox()
pytestmark = pytest.mark.django_db
-class TestAuthViews(BoxAddonTestCase, views_testing.OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
+class TestAuthViews(
+ BoxAddonTestCase,
+ views_testing.OAuthAddonAuthViewsTestCaseMixin,
+ OsfTestCase,
+):
def setUp(self):
- self.mock_refresh = mock.patch('addons.box.models.Provider.refresh_oauth_key')
+ self.mock_refresh = mock.patch(
+ "addons.box.models.Provider.refresh_oauth_key"
+ )
self.mock_refresh.return_value = True
self.mock_refresh.start()
super().setUp()
@@ -37,27 +44,27 @@ def tearDown(self):
super().tearDown()
@mock.patch(
- 'addons.box.models.UserSettings.revoke_remote_oauth_access',
- mock.PropertyMock()
+ "addons.box.models.UserSettings.revoke_remote_oauth_access",
+ mock.PropertyMock(),
)
def test_delete_external_account(self):
super().test_delete_external_account()
-class TestConfigViews(BoxAddonTestCase, views_testing.OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
-
- folder = {
- 'path': '/Foo',
- 'id': '12234'
- }
+class TestConfigViews(
+ BoxAddonTestCase,
+ views_testing.OAuthAddonConfigViewsTestCaseMixin,
+ OsfTestCase,
+):
+ folder = {"path": "/Foo", "id": "12234"}
Serializer = BoxSerializer
client = mock_client
def setUp(self):
self.mock_data = mock.patch.object(
NodeSettings,
- '_folder_data',
- return_value=(self.folder['id'], self.folder['path'])
+ "_folder_data",
+ return_value=(self.folder["id"], self.folder["path"]),
)
self.mock_data.start()
super().setUp()
@@ -66,18 +73,24 @@ def tearDown(self):
self.mock_data.stop()
super().tearDown()
- @mock.patch.object(BoxSerializer, 'credentials_are_valid', return_value=True)
+ @mock.patch.object(
+ BoxSerializer, "credentials_are_valid", return_value=True
+ )
def test_import_auth(self, *args):
super().test_import_auth()
-class TestFilebrowserViews(BoxAddonTestCase, OsfTestCase):
+class TestFilebrowserViews(BoxAddonTestCase, OsfTestCase):
def setUp(self):
super().setUp()
- self.user.add_addon('box')
- self.node_settings.external_account = self.user_settings.external_accounts[0]
+ self.user.add_addon("box")
+ self.node_settings.external_account = (
+ self.user_settings.external_accounts[0]
+ )
self.node_settings.save()
- self.patcher_refresh = mock.patch('addons.box.models.Provider.refresh_oauth_key')
+ self.patcher_refresh = mock.patch(
+ "addons.box.models.Provider.refresh_oauth_key"
+ )
self.patcher_refresh.return_value = True
self.patcher_refresh.start()
@@ -85,72 +98,90 @@ def tearDown(self):
self.patcher_refresh.stop()
def test_box_list_folders(self):
- with mock.patch('addons.box.models.Client.folder') as folder_mock:
- folder_mock.return_value.get.return_value = mock_responses['folder']
- url = self.project.api_url_for('box_folder_list', folder_id='foo')
+ with mock.patch("addons.box.models.Client.folder") as folder_mock:
+ folder_mock.return_value.get.return_value = mock_responses[
+ "folder"
+ ]
+ url = self.project.api_url_for("box_folder_list", folder_id="foo")
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.folder('', list=True)['item_collection']['entries']
- expected = [each for each in contents if each['type'] == 'folder']
+ contents = mock_client.folder("", list=True)["item_collection"][
+ "entries"
+ ]
+ expected = [each for each in contents if each["type"] == "folder"]
assert len(res.json) == len(expected)
first = res.json[0]
- assert 'kind' in first
- assert first['name'] == contents[0]['name']
+ assert "kind" in first
+ assert first["name"] == contents[0]["name"]
- @mock.patch('addons.box.models.NodeSettings.folder_id')
+ @mock.patch("addons.box.models.NodeSettings.folder_id")
def test_box_list_folders_if_folder_is_none(self, mock_folder):
# If folder is set to none, no data are returned
mock_folder.__get__ = mock.Mock(return_value=None)
- url = self.project.api_url_for('box_folder_list')
+ url = self.project.api_url_for("box_folder_list")
res = self.app.get(url, auth=self.user.auth)
assert len(res.json) == 1
def test_box_list_folders_if_folder_is_none_and_folders_only(self):
- with patch_client('addons.box.models.Client'):
+ with patch_client("addons.box.models.Client"):
self.node_settings.folder_name = None
self.node_settings.save()
- url = api_url_for('box_folder_list',
- pid=self.project._primary_key, foldersOnly=True)
+ url = api_url_for(
+ "box_folder_list",
+ pid=self.project._primary_key,
+ foldersOnly=True,
+ )
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.folder('', list=True)['item_collection']['entries']
- expected = [each for each in contents if each['type'] == 'folder']
+ contents = mock_client.folder("", list=True)["item_collection"][
+ "entries"
+ ]
+ expected = [each for each in contents if each["type"] == "folder"]
assert len(res.json) == len(expected)
def test_box_list_folders_folders_only(self):
- with patch_client('addons.box.models.Client'):
- url = self.project.api_url_for('box_folder_list', foldersOnly=True)
+ with patch_client("addons.box.models.Client"):
+ url = self.project.api_url_for("box_folder_list", foldersOnly=True)
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.folder('', list=True)['item_collection']['entries']
- expected = [each for each in contents if each['type'] == 'folder']
+ contents = mock_client.folder("", list=True)["item_collection"][
+ "entries"
+ ]
+ expected = [each for each in contents if each["type"] == "folder"]
assert len(res.json) == len(expected)
def test_box_list_folders_doesnt_include_root(self):
- with mock.patch('addons.box.models.Client.folder') as folder_mock:
- folder_mock.return_value.get.return_value = mock_responses['folder']
- url = self.project.api_url_for('box_folder_list', folder_id=0)
+ with mock.patch("addons.box.models.Client.folder") as folder_mock:
+ folder_mock.return_value.get.return_value = mock_responses[
+ "folder"
+ ]
+ url = self.project.api_url_for("box_folder_list", folder_id=0)
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.folder('', list=True)['item_collection']['entries']
- expected = [each for each in contents if each['type'] == 'folder']
+ contents = mock_client.folder("", list=True)["item_collection"][
+ "entries"
+ ]
+ expected = [each for each in contents if each["type"] == "folder"]
assert len(res.json) == len(expected)
- @mock.patch('addons.box.models.Client.folder')
- def test_box_list_folders_returns_error_if_invalid_path(self, mock_metadata):
- mock_metadata.side_effect = BoxAPIException(status=404, message='File not found')
- url = self.project.api_url_for('box_folder_list', folder_id='lolwut')
+ @mock.patch("addons.box.models.Client.folder")
+ def test_box_list_folders_returns_error_if_invalid_path(
+ self, mock_metadata
+ ):
+ mock_metadata.side_effect = BoxAPIException(
+ status=404, message="File not found"
+ )
+ url = self.project.api_url_for("box_folder_list", folder_id="lolwut")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_404_NOT_FOUND
- @mock.patch('addons.box.models.Client.folder')
+ @mock.patch("addons.box.models.Client.folder")
def test_box_list_folders_handles_max_retry_error(self, mock_metadata):
mock_response = mock.Mock()
- url = self.project.api_url_for('box_folder_list', folder_id='fo')
+ url = self.project.api_url_for("box_folder_list", folder_id="fo")
mock_metadata.side_effect = MaxRetryError(mock_response, url)
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_400_BAD_REQUEST
class TestRestrictions(BoxAddonTestCase, OsfTestCase):
-
def setUp(self):
super(BoxAddonTestCase, self).setUp()
@@ -160,38 +191,41 @@ def setUp(self):
self.project.add_contributor(self.contrib, auth=Auth(self.user))
self.project.save()
- self.user.add_addon('box')
- settings = self.user.get_addon('box')
- settings.access_token = '12345abc'
+ self.user.add_addon("box")
+ settings = self.user.get_addon("box")
+ settings.access_token = "12345abc"
settings.last_refreshed = timezone.now()
settings.save()
- self.patcher = mock.patch('addons.box.models.NodeSettings.fetch_folder_name')
- self.patcher.return_value = 'foo bar/baz'
+ self.patcher = mock.patch(
+ "addons.box.models.NodeSettings.fetch_folder_name"
+ )
+ self.patcher.return_value = "foo bar/baz"
self.patcher.start()
- @mock.patch('addons.box.models.NodeSettings.has_auth')
+ @mock.patch("addons.box.models.NodeSettings.has_auth")
def test_restricted_hgrid_data_contents(self, mock_auth):
mock_auth.__get__ = mock.Mock(return_value=False)
# tries to access a parent folder
- url = self.project.api_url_for('box_folder_list',
- path='foo bar')
+ url = self.project.api_url_for("box_folder_list", path="foo bar")
res = self.app.get(url, auth=self.contrib.auth)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
def test_restricted_config_contrib_no_addon(self):
- url = api_url_for('box_set_config', pid=self.project._primary_key)
- res = self.app.put(url, json={'selected': {'path': 'foo'}},
- auth=self.contrib.auth)
+ url = api_url_for("box_set_config", pid=self.project._primary_key)
+ res = self.app.put(
+ url, json={"selected": {"path": "foo"}}, auth=self.contrib.auth
+ )
assert res.status_code == http_status.HTTP_400_BAD_REQUEST
def test_restricted_config_contrib_not_owner(self):
# Contributor has box auth, but is not the node authorizer
- self.contrib.add_addon('box')
+ self.contrib.add_addon("box")
self.contrib.save()
- url = api_url_for('box_set_config', pid=self.project._primary_key)
- res = self.app.put(url, json={'selected': {'path': 'foo'}},
- auth=self.contrib.auth)
+ url = api_url_for("box_set_config", pid=self.project._primary_key)
+ res = self.app.put(
+ url, json={"selected": {"path": "foo"}}, auth=self.contrib.auth
+ )
assert res.status_code == http_status.HTTP_403_FORBIDDEN
diff --git a/addons/box/tests/utils.py b/addons/box/tests/utils.py
index 7b4c942258e..1079a5d03d6 100644
--- a/addons/box/tests/utils.py
+++ b/addons/box/tests/utils.py
@@ -7,174 +7,174 @@
class BoxAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
-
- ADDON_SHORT_NAME = 'box'
+ ADDON_SHORT_NAME = "box"
ExternalAccountFactory = BoxAccountFactory
Provider = Provider
def set_node_settings(self, settings):
super().set_node_settings(settings)
- settings.folder_id = '1234567890'
- settings.folder_name = 'Foo'
+ settings.folder_id = "1234567890"
+ settings.folder_name = "Foo"
+
mock_responses = {
- 'folder': {
- 'name': 'anything',
- 'item_collection': {
- 'entries': [
- {
- 'name': 'anything', 'type': 'file', 'id': 'anything'
- },
- {
- 'name': 'anything', 'type': 'folder', 'id': 'anything'
- },
- {
- 'name': 'anything', 'type': 'anything', 'id': 'anything'
- },
+ "folder": {
+ "name": "anything",
+ "item_collection": {
+ "entries": [
+ {"name": "anything", "type": "file", "id": "anything"},
+ {"name": "anything", "type": "folder", "id": "anything"},
+ {"name": "anything", "type": "anything", "id": "anything"},
]
},
- 'path_collection': {
- 'entries': [
- {
- 'name': 'anything', 'type': 'file', 'id': 'anything'
- },
- {
- 'name': 'anything', 'type': 'folder', 'id': 'anything'
- },
- {
- 'name': 'anything', 'type': 'anything', 'id': 'anything'
- },
+ "path_collection": {
+ "entries": [
+ {"name": "anything", "type": "file", "id": "anything"},
+ {"name": "anything", "type": "folder", "id": "anything"},
+ {"name": "anything", "type": "anything", "id": "anything"},
]
- }
+ },
},
- 'put_file': {
- 'bytes': 77,
- 'icon': 'page_white_text',
- 'is_dir': False,
- 'mime_type': 'text/plain',
- 'modified': 'Wed, 20 Jul 2011 22:04:50 +0000',
- 'path': '/magnum-opus.txt',
- 'rev': '362e2029684fe',
- 'revision': 221922,
- 'root': 'box',
- 'size': '77 bytes',
- 'thumb_exists': False
+ "put_file": {
+ "bytes": 77,
+ "icon": "page_white_text",
+ "is_dir": False,
+ "mime_type": "text/plain",
+ "modified": "Wed, 20 Jul 2011 22:04:50 +0000",
+ "path": "/magnum-opus.txt",
+ "rev": "362e2029684fe",
+ "revision": 221922,
+ "root": "box",
+ "size": "77 bytes",
+ "thumb_exists": False,
},
- 'metadata_list': {
- 'size': '0 bytes',
- 'hash': '37eb1ba1849d4b0fb0b28caf7ef3af52',
- 'bytes': 0,
- 'thumb_exists': False,
- 'rev': '714f029684fe',
- 'modified': 'Wed, 27 Apr 2011 22:18:51 +0000',
- 'path': '/Public',
- 'is_dir': True,
- 'icon': 'folder_public',
- 'root': 'box',
- 'contents': [
+ "metadata_list": {
+ "size": "0 bytes",
+ "hash": "37eb1ba1849d4b0fb0b28caf7ef3af52",
+ "bytes": 0,
+ "thumb_exists": False,
+ "rev": "714f029684fe",
+ "modified": "Wed, 27 Apr 2011 22:18:51 +0000",
+ "path": "/Public",
+ "is_dir": True,
+ "icon": "folder_public",
+ "root": "box",
+ "contents": [
{
- 'size': '0 bytes',
- 'rev': '35c1f029684fe',
- 'thumb_exists': False,
- 'bytes': 0,
- 'modified': 'Mon, 18 Jul 2011 20:13:43 +0000',
- 'client_mtime': 'Wed, 20 Apr 2011 16:20:19 +0000',
- 'path': '/Public/latest.txt',
- 'is_dir': False,
- 'icon': 'page_white_text',
- 'root': 'box',
- 'mime_type': 'text/plain',
- 'revision': 220191
+ "size": "0 bytes",
+ "rev": "35c1f029684fe",
+ "thumb_exists": False,
+ "bytes": 0,
+ "modified": "Mon, 18 Jul 2011 20:13:43 +0000",
+ "client_mtime": "Wed, 20 Apr 2011 16:20:19 +0000",
+ "path": "/Public/latest.txt",
+ "is_dir": False,
+ "icon": "page_white_text",
+ "root": "box",
+ "mime_type": "text/plain",
+ "revision": 220191,
},
{
- 'bytes': 0,
- 'icon': 'folder',
- 'is_dir': True,
- 'modified': 'Sat, 22 Mar 2014 05:40:29 +0000',
- 'path': '/datasets/New Folder',
- 'rev': '3fed51f002c12fc',
- 'revision': 67032351,
- 'root': 'box',
- 'size': '0 bytes',
- 'thumb_exists': False
- }
+ "bytes": 0,
+ "icon": "folder",
+ "is_dir": True,
+ "modified": "Sat, 22 Mar 2014 05:40:29 +0000",
+ "path": "/datasets/New Folder",
+ "rev": "3fed51f002c12fc",
+ "revision": 67032351,
+ "root": "box",
+ "size": "0 bytes",
+ "thumb_exists": False,
+ },
],
- 'revision': 29007
+ "revision": 29007,
},
- 'metadata_single': {
- 'id': 'id',
- 'bytes': 74,
- 'client_mtime': 'Mon, 13 Jan 2014 20:24:15 +0000',
- 'icon': 'page_white',
- 'is_dir': False,
- 'mime_type': 'text/csv',
- 'modified': 'Fri, 21 Mar 2014 05:46:36 +0000',
- 'path': '/datasets/foo.txt',
- 'rev': 'a2149fb64',
- 'revision': 10,
- 'root': 'app_folder',
- 'size': '74 bytes',
- 'thumb_exists': False
+ "metadata_single": {
+ "id": "id",
+ "bytes": 74,
+ "client_mtime": "Mon, 13 Jan 2014 20:24:15 +0000",
+ "icon": "page_white",
+ "is_dir": False,
+ "mime_type": "text/csv",
+ "modified": "Fri, 21 Mar 2014 05:46:36 +0000",
+ "path": "/datasets/foo.txt",
+ "rev": "a2149fb64",
+ "revision": 10,
+ "root": "app_folder",
+ "size": "74 bytes",
+ "thumb_exists": False,
},
- 'revisions': [{'bytes': 0,
- 'client_mtime': 'Wed, 31 Dec 1969 23:59:59 +0000',
- 'icon': 'page_white_picture',
- 'is_deleted': True,
- 'is_dir': False,
- 'mime_type': 'image/png',
- 'modified': 'Tue, 25 Mar 2014 03:39:13 +0000',
- 'path': '/svs-v-barks.png',
- 'rev': '3fed741002c12fc',
- 'revision': 67032897,
- 'root': 'box',
- 'size': '0 bytes',
- 'thumb_exists': True},
- {'bytes': 151164,
- 'client_mtime': 'Sat, 13 Apr 2013 21:56:36 +0000',
- 'icon': 'page_white_picture',
- 'is_dir': False,
- 'mime_type': 'image/png',
- 'modified': 'Tue, 25 Mar 2014 01:45:51 +0000',
- 'path': '/svs-v-barks.png',
- 'rev': '3fed61a002c12fc',
- 'revision': 67032602,
- 'root': 'box',
- 'size': '147.6 KB',
- 'thumb_exists': True}]
+ "revisions": [
+ {
+ "bytes": 0,
+ "client_mtime": "Wed, 31 Dec 1969 23:59:59 +0000",
+ "icon": "page_white_picture",
+ "is_deleted": True,
+ "is_dir": False,
+ "mime_type": "image/png",
+ "modified": "Tue, 25 Mar 2014 03:39:13 +0000",
+ "path": "/svs-v-barks.png",
+ "rev": "3fed741002c12fc",
+ "revision": 67032897,
+ "root": "box",
+ "size": "0 bytes",
+ "thumb_exists": True,
+ },
+ {
+ "bytes": 151164,
+ "client_mtime": "Sat, 13 Apr 2013 21:56:36 +0000",
+ "icon": "page_white_picture",
+ "is_dir": False,
+ "mime_type": "image/png",
+ "modified": "Tue, 25 Mar 2014 01:45:51 +0000",
+ "path": "/svs-v-barks.png",
+ "rev": "3fed61a002c12fc",
+ "revision": 67032602,
+ "root": "box",
+ "size": "147.6 KB",
+ "thumb_exists": True,
+ },
+ ],
}
class MockBox:
-
def put_file(self, full_path, file_obj, overwrite=False, parent_rev=None):
- return mock_responses['put_file']
-
- def metadata(self, path, list=True, file_limit=25000, hash=None, rev=None,
- include_deleted=False):
+ return mock_responses["put_file"]
+
+ def metadata(
+ self,
+ path,
+ list=True,
+ file_limit=25000,
+ hash=None,
+ rev=None,
+ include_deleted=False,
+ ):
if list:
- ret = mock_responses['metadata_list']
+ ret = mock_responses["metadata_list"]
else:
- ret = mock_responses['metadata_single']
- ret['path'] = path
+ ret = mock_responses["metadata_single"]
+ ret["path"] = path
return ret
def folder(*args, **kwargs):
- return mock_responses['folder']
+ return mock_responses["folder"]
def get_file_and_metadata(*args, **kwargs):
pass
def file_delete(self, path):
- return mock_responses['metadata_single']
+ return mock_responses["metadata_single"]
def revisions(self, path):
- ret = mock_responses['revisions']
+ ret = mock_responses["revisions"]
for each in ret:
- each['path'] = path
+ each["path"] = path
return ret
def user(self):
- return {'display_name': 'Mr. Box'}
+ return {"display_name": "Mr. Box"}
@contextmanager
diff --git a/addons/box/views.py b/addons/box/views.py
index 7e3694f75ae..660c120297c 100644
--- a/addons/box/views.py
+++ b/addons/box/views.py
@@ -1,49 +1,42 @@
"""Views for the node settings page."""
+
from flask import request
from addons.base import generic_views
from addons.box.serializer import BoxSerializer
-from website.project.decorators import must_have_addon, must_be_addon_authorizer
+from website.project.decorators import (
+ must_have_addon,
+ must_be_addon_authorizer,
+)
-SHORT_NAME = 'box'
-FULL_NAME = 'Box'
+SHORT_NAME = "box"
+FULL_NAME = "Box"
-box_account_list = generic_views.account_list(
- SHORT_NAME,
- BoxSerializer
-)
+box_account_list = generic_views.account_list(SHORT_NAME, BoxSerializer)
+
+box_import_auth = generic_views.import_auth(SHORT_NAME, BoxSerializer)
-box_import_auth = generic_views.import_auth(
- SHORT_NAME,
- BoxSerializer
-)
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "node")
@must_be_addon_authorizer(SHORT_NAME)
def box_folder_list(node_addon, **kwargs):
- """ Returns all the subsequent folders under the folder id passed.
- """
- folder_id = request.args.get('folder_id')
+ """Returns all the subsequent folders under the folder id passed."""
+ folder_id = request.args.get("folder_id")
return node_addon.get_folders(folder_id=folder_id)
-box_get_config = generic_views.get_config(
- SHORT_NAME,
- BoxSerializer
-)
+
+box_get_config = generic_views.get_config(SHORT_NAME, BoxSerializer)
+
def _set_folder(node_addon, folder, auth):
- uid = folder['id']
+ uid = folder["id"]
node_addon.set_folder(uid, auth=auth)
node_addon.save()
+
box_set_config = generic_views.set_config(
- SHORT_NAME,
- FULL_NAME,
- BoxSerializer,
- _set_folder
+ SHORT_NAME, FULL_NAME, BoxSerializer, _set_folder
)
-box_deauthorize_node = generic_views.deauthorize_node(
- SHORT_NAME
-)
+box_deauthorize_node = generic_views.deauthorize_node(SHORT_NAME)
diff --git a/addons/dataverse/apps.py b/addons/dataverse/apps.py
index 15b04f40f15..8242e0b2e56 100644
--- a/addons/dataverse/apps.py
+++ b/addons/dataverse/apps.py
@@ -4,58 +4,71 @@
from addons.dataverse.settings import MAX_UPLOAD_SIZE
HERE = os.path.dirname(os.path.abspath(__file__))
-TEMPLATE_PATH = os.path.join(
- HERE,
- 'templates'
-)
+TEMPLATE_PATH = os.path.join(HERE, "templates")
-class DataverseAddonAppConfig(BaseAddonAppConfig):
+class DataverseAddonAppConfig(BaseAddonAppConfig):
default = True
- name = 'addons.dataverse'
- label = 'addons_dataverse'
- full_name = 'Dataverse'
- short_name = 'dataverse'
- owners = ['user', 'node']
- configs = ['accounts', 'node']
- views = ['widget']
- categories = ['storage']
+ name = "addons.dataverse"
+ label = "addons_dataverse"
+ full_name = "Dataverse"
+ short_name = "dataverse"
+ owners = ["user", "node"]
+ configs = ["accounts", "node"]
+ views = ["widget"]
+ categories = ["storage"]
include_css = {
- 'widget': ['dataverse.css'],
- 'page': [],
+ "widget": ["dataverse.css"],
+ "page": [],
}
has_hgrid_files = True
- node_settings_template = os.path.join(TEMPLATE_PATH, 'dataverse_node_settings.mako')
- user_settings_template = os.path.join(TEMPLATE_PATH, 'dataverse_user_settings.mako')
+ node_settings_template = os.path.join(
+ TEMPLATE_PATH, "dataverse_node_settings.mako"
+ )
+ user_settings_template = os.path.join(
+ TEMPLATE_PATH, "dataverse_user_settings.mako"
+ )
max_file_size = MAX_UPLOAD_SIZE
@property
def get_hgrid_data(self):
# Avoid circular import
from addons.dataverse.views import _dataverse_root_folder
+
return _dataverse_root_folder
- FILE_ADDED = 'dataverse_file_added'
- FILE_REMOVED = 'dataverse_file_removed'
- DATASET_LINKED = 'dataverse_dataset_linked'
- DATASET_PUBLISHED = 'dataverse_dataset_published'
- STUDY_LINKED = 'dataverse_study_linked'
- STUDY_RELEASED = 'dataverse_study_released'
- NODE_AUTHORIZED = 'dataverse_node_authorized'
- NODE_DEAUTHORIZED = 'dataverse_node_deauthorized'
- NODE_DEAUTHORIZED_NO_USER = 'dataverse_node_deauthorized_no_user'
+ FILE_ADDED = "dataverse_file_added"
+ FILE_REMOVED = "dataverse_file_removed"
+ DATASET_LINKED = "dataverse_dataset_linked"
+ DATASET_PUBLISHED = "dataverse_dataset_published"
+ STUDY_LINKED = "dataverse_study_linked"
+ STUDY_RELEASED = "dataverse_study_released"
+ NODE_AUTHORIZED = "dataverse_node_authorized"
+ NODE_DEAUTHORIZED = "dataverse_node_deauthorized"
+ NODE_DEAUTHORIZED_NO_USER = "dataverse_node_deauthorized_no_user"
- actions = (FILE_ADDED, FILE_REMOVED, DATASET_LINKED, DATASET_PUBLISHED, STUDY_LINKED, STUDY_RELEASED, NODE_AUTHORIZED, NODE_DEAUTHORIZED, NODE_DEAUTHORIZED_NO_USER)
+ actions = (
+ FILE_ADDED,
+ FILE_REMOVED,
+ DATASET_LINKED,
+ DATASET_PUBLISHED,
+ STUDY_LINKED,
+ STUDY_RELEASED,
+ NODE_AUTHORIZED,
+ NODE_DEAUTHORIZED,
+ NODE_DEAUTHORIZED_NO_USER,
+ )
@property
def routes(self):
from .routes import api_routes
+
return [api_routes]
@property
def user_settings(self):
- return self.get_model('UserSettings')
+ return self.get_model("UserSettings")
@property
def node_settings(self):
- return self.get_model('NodeSettings')
+ return self.get_model("NodeSettings")
diff --git a/addons/dataverse/client.py b/addons/dataverse/client.py
index d8b2362d875..637f2b7a67f 100644
--- a/addons/dataverse/client.py
+++ b/addons/dataverse/client.py
@@ -1,13 +1,18 @@
from rest_framework import status as http_status
from dataverse import Connection
-from dataverse.exceptions import ConnectionError, UnauthorizedError, OperationFailedError
+from dataverse.exceptions import (
+ ConnectionError,
+ UnauthorizedError,
+ OperationFailedError,
+)
from framework.exceptions import HTTPError
from addons.dataverse import settings
from osf.utils.sanitize import strip_html
+
def _connect(host, token):
try:
return Connection(host, token)
@@ -49,7 +54,7 @@ def connect_from_settings_or_401(node_settings):
def get_files(dataset, published=False):
- version = 'latest-published' if published else 'latest'
+ version = "latest-published" if published else "latest"
return dataset.get_files(version)
@@ -61,16 +66,22 @@ def publish_dataverse(dataverse):
def publish_dataset(dataset):
- if dataset.get_state() == 'RELEASED':
- raise HTTPError(http_status.HTTP_409_CONFLICT, data=dict(
- message_short='Dataset conflict',
- message_long='This version of the dataset has already been published.'
- ))
+ if dataset.get_state() == "RELEASED":
+ raise HTTPError(
+ http_status.HTTP_409_CONFLICT,
+ data=dict(
+ message_short="Dataset conflict",
+ message_long="This version of the dataset has already been published.",
+ ),
+ )
if not dataset.dataverse.is_published:
- raise HTTPError(http_status.HTTP_405_METHOD_NOT_ALLOWED, data=dict(
- message_short='Method not allowed',
- message_long='A dataset cannot be published until its parent Dataverse is published.'
- ))
+ raise HTTPError(
+ http_status.HTTP_405_METHOD_NOT_ALLOWED,
+ data=dict(
+ message_short="Method not allowed",
+ message_long="A dataset cannot be published until its parent Dataverse is published.",
+ ),
+ )
try:
dataset.publish()
@@ -87,20 +98,28 @@ def get_datasets(dataverse):
def get_dataset(dataverse, doi):
if dataverse is None:
return
- dataset = dataverse.get_dataset_by_doi(doi, timeout=settings.REQUEST_TIMEOUT)
+ dataset = dataverse.get_dataset_by_doi(
+ doi, timeout=settings.REQUEST_TIMEOUT
+ )
try:
- if dataset and dataset.get_state() == 'DEACCESSIONED':
- raise HTTPError(http_status.HTTP_410_GONE, data=dict(
- message_short='Dataset deaccessioned',
- message_long='This dataset has been deaccessioned and can no longer be linked to the OSF.'
- ))
+ if dataset and dataset.get_state() == "DEACCESSIONED":
+ raise HTTPError(
+ http_status.HTTP_410_GONE,
+ data=dict(
+ message_short="Dataset deaccessioned",
+ message_long="This dataset has been deaccessioned and can no longer be linked to the OSF.",
+ ),
+ )
return dataset
except UnicodeDecodeError:
- raise HTTPError(http_status.HTTP_406_NOT_ACCEPTABLE, data=dict(
- message_short='Not acceptable',
- message_long='This dataset cannot be connected due to forbidden '
- 'characters in one or more of the file names.'
- ))
+ raise HTTPError(
+ http_status.HTTP_406_NOT_ACCEPTABLE,
+ data=dict(
+ message_short="Not acceptable",
+ message_long="This dataset cannot be connected due to forbidden "
+ "characters in one or more of the file names.",
+ ),
+ )
def get_dataverses(connection):
@@ -117,5 +136,7 @@ def get_dataverse(connection, alias):
def get_custom_publish_text(connection):
if connection is None:
- return ''
- return strip_html(connection.get_custom_publish_text(), tags=['strong', 'li', 'ul'])
+ return ""
+ return strip_html(
+ connection.get_custom_publish_text(), tags=["strong", "li", "ul"]
+ )
diff --git a/addons/dataverse/models.py b/addons/dataverse/models.py
index 1b4753d7cfa..cb4e9e2384a 100644
--- a/addons/dataverse/models.py
+++ b/addons/dataverse/models.py
@@ -1,8 +1,11 @@
from rest_framework import status as http_status
from addons.base import exceptions as addon_errors
-from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
- BaseStorageAddon)
+from addons.base.models import (
+ BaseOAuthNodeSettings,
+ BaseOAuthUserSettings,
+ BaseStorageAddon,
+)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from framework.auth.decorators import Auth
@@ -13,19 +16,20 @@
from addons.dataverse.serializer import DataverseSerializer
from addons.dataverse.utils import DataverseNodeLogger
+
class DataverseFileNode(BaseFileNode):
- _provider = 'dataverse'
+ _provider = "dataverse"
@classmethod
def get_or_create(cls, target, path, **query_params):
- '''Override get_or_create for Dataverse.
+ """Override get_or_create for Dataverse.
Dataverse is weird and reuses paths, so we need to extract a "version"
query param to determine which file to get. We also don't want to "create"
here, as that might lead to integrity errors.
- '''
- version = query_params.get('version', None)
- if version not in {'latest', 'latest-published'}:
+ """
+ version = query_params.get("version", None)
+ if version not in {"latest", "latest-published"}:
raise addon_errors.QueryError(
'Dataverse requires a "version" query paramater. '
'Acceptable options are "latest" or "latest-published"'
@@ -36,7 +40,7 @@ def get_or_create(cls, target, path, **query_params):
obj = cls.objects.get(
target_object_id=target.id,
target_content_type=content_type,
- _path='/' + path.lstrip('/'),
+ _path="/" + path.lstrip("/"),
_history__0__extra__datasetVersion=version,
)
except cls.DoesNotExist:
@@ -52,12 +56,12 @@ class DataverseFolder(DataverseFileNode, Folder):
class DataverseFile(DataverseFileNode, File):
- version_identifier = 'version'
+ version_identifier = "version"
@property
def _hashes(self):
try:
- return self._history[-1]['extra']['hashes']
+ return self._history[-1]["extra"]["hashes"]
except (IndexError, KeyError):
return None
@@ -72,8 +76,9 @@ def update(self, revision, data, save=True, user=None):
class DataverseProvider:
"""An alternative to `ExternalProvider` not tied to OAuth"""
- name = 'Dataverse'
- short_name = 'dataverse'
+
+ name = "Dataverse"
+ short_name = "dataverse"
serializer = DataverseSerializer
def __init__(self, account=None):
@@ -82,9 +87,9 @@ def __init__(self, account=None):
self.account = account
def __repr__(self):
- return '<{name}: {status}>'.format(
+ return "<{name}: {status}>".format(
name=self.__class__.__name__,
- status=self.account.provider_id if self.account else 'anonymous'
+ status=self.account.provider_id if self.account else "anonymous",
)
@@ -102,7 +107,9 @@ class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
dataset_doi = models.TextField(blank=True, null=True)
_dataset_id = models.TextField(blank=True, null=True)
dataset = models.TextField(blank=True, null=True)
- user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
+ user_settings = models.ForeignKey(
+ UserSettings, null=True, blank=True, on_delete=models.CASCADE
+ )
@property
def folder_name(self):
@@ -110,7 +117,9 @@ def folder_name(self):
@property
def dataset_id(self):
- if self._dataset_id is None and (self.dataverse_alias and self.dataset_doi):
+ if self._dataset_id is None and (
+ self.dataverse_alias and self.dataset_doi
+ ):
connection = connect_from_settings_or_401(self)
dataverse = connection.get_dataverse(self.dataverse_alias)
dataset = dataverse.get_dataset_by_doi(self.dataset_doi)
@@ -136,10 +145,7 @@ def nodelogger(self):
auth = None
if self.user_settings:
auth = Auth(self.user_settings.owner)
- return DataverseNodeLogger(
- node=self.owner,
- auth=auth
- )
+ return DataverseNodeLogger(node=self.owner, auth=auth)
def set_folder(self, dataverse, dataset, auth=None):
self.dataverse_alias = dataverse.alias
@@ -153,21 +159,28 @@ def set_folder(self, dataverse, dataset, auth=None):
if auth:
self.owner.add_log(
- action='dataverse_dataset_linked',
+ action="dataverse_dataset_linked",
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
- 'dataset': dataset.title,
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
+ "dataset": dataset.title,
},
auth=auth,
)
- def _get_fileobj_child_metadata(self, filenode, user, cookie=None, version=None):
+ def _get_fileobj_child_metadata(
+ self, filenode, user, cookie=None, version=None
+ ):
try:
- return super()._get_fileobj_child_metadata(filenode, user, cookie=cookie, version=version)
+ return super()._get_fileobj_child_metadata(
+ filenode, user, cookie=cookie, version=version
+ )
except HTTPError as e:
# The Dataverse API returns a 404 if the dataset has no published files
- if e.code == http_status.HTTP_404_NOT_FOUND and version == 'latest-published':
+ if (
+ e.code == http_status.HTTP_404_NOT_FOUND
+ and version == "latest-published"
+ ):
return []
raise
@@ -188,43 +201,44 @@ def deauthorize(self, auth=None, add_log=True):
if add_log and auth:
node = self.owner
self.owner.add_log(
- action='dataverse_node_deauthorized',
+ action="dataverse_node_deauthorized",
params={
- 'project': node.parent_id,
- 'node': node._id,
+ "project": node.parent_id,
+ "node": node._id,
},
auth=auth,
)
def serialize_waterbutler_credentials(self):
if not self.has_auth:
- raise exceptions.AddonError('Addon is not authorized')
- return {'token': self.external_account.oauth_secret}
+ raise exceptions.AddonError("Addon is not authorized")
+ return {"token": self.external_account.oauth_secret}
def serialize_waterbutler_settings(self):
if not self.folder_id:
- raise exceptions.AddonError('Dataverse is not configured')
+ raise exceptions.AddonError("Dataverse is not configured")
return {
- 'host': self.external_account.oauth_key,
- 'doi': self.dataset_doi,
- 'id': self.dataset_id,
- 'name': self.dataset,
+ "host": self.external_account.oauth_key,
+ "doi": self.dataset_doi,
+ "id": self.dataset_id,
+ "name": self.dataset,
}
def create_waterbutler_log(self, auth, action, metadata):
- url = self.owner.web_url_for('addon_view_or_download_file', path=metadata['path'], provider='dataverse')
+ url = self.owner.web_url_for(
+ "addon_view_or_download_file",
+ path=metadata["path"],
+ provider="dataverse",
+ )
self.owner.add_log(
- f'dataverse_{action}',
+ f"dataverse_{action}",
auth=auth,
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
- 'dataset': self.dataset,
- 'filename': metadata['materialized'].strip('/'),
- 'urls': {
- 'view': url,
- 'download': url + '?action=download'
- },
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
+ "dataset": self.dataset,
+ "filename": metadata["materialized"].strip("/"),
+ "urls": {"view": url, "download": url + "?action=download"},
},
)
diff --git a/addons/dataverse/routes.py b/addons/dataverse/routes.py
index ad228b77f51..22a1429ad16 100644
--- a/addons/dataverse/routes.py
+++ b/addons/dataverse/routes.py
@@ -3,97 +3,97 @@
from . import views
api_routes = {
- 'rules': [
+ "rules": [
Rule(
- '/settings/dataverse/',
- 'get',
+ "/settings/dataverse/",
+ "get",
views.dataverse_user_config_get,
json_renderer,
),
Rule(
- '/settings/dataverse/accounts/',
- 'post',
+ "/settings/dataverse/accounts/",
+ "post",
views.dataverse_add_user_account,
json_renderer,
),
Rule(
- '/settings/dataverse/accounts/',
- 'get',
+ "/settings/dataverse/accounts/",
+ "get",
views.dataverse_account_list,
json_renderer,
),
Rule(
[
- '/project//dataverse/settings/',
- '/project//node//dataverse/settings/',
+ "/project//dataverse/settings/",
+ "/project//node//dataverse/settings/",
],
- 'get',
+ "get",
views.dataverse_get_config,
json_renderer,
),
Rule(
[
- '/project//dataverse/settings/',
- '/project//node//dataverse/settings/',
+ "/project//dataverse/settings/",
+ "/project//node//dataverse/settings/",
],
- 'post',
+ "post",
views.dataverse_set_config,
json_renderer,
),
Rule(
[
- '/project//dataverse/user-auth/',
- '/project//node//dataverse/user-auth/',
+ "/project//dataverse/user-auth/",
+ "/project//node//dataverse/user-auth/",
],
- 'put',
+ "put",
views.dataverse_import_auth,
json_renderer,
),
Rule(
[
- '/project//dataverse/user-auth/',
- '/project//node//dataverse/user-auth/',
+ "/project//dataverse/user-auth/",
+ "/project//node//dataverse/user-auth/",
],
- 'delete',
+ "delete",
views.dataverse_deauthorize_node,
json_renderer,
),
Rule(
[
- '/project//dataverse/list-datasets/',
- '/project//node//dataverse/list-datasets/',
+ "/project//dataverse/list-datasets/",
+ "/project//node//dataverse/list-datasets/",
],
- 'post',
+ "post",
views.dataverse_get_datasets,
json_renderer,
),
Rule(
[
- '/project//dataverse/hgrid/root/',
- '/project//node//dataverse/hgrid/root/',
+ "/project//dataverse/hgrid/root/",
+ "/project//node//dataverse/hgrid/root/",
],
- 'get',
+ "get",
views.dataverse_root_folder,
json_renderer,
),
Rule(
[
- '/project//dataverse/publish/',
- '/project//node//dataverse/publish/',
+ "/project//dataverse/publish/",
+ "/project//node//dataverse/publish/",
],
- 'put',
+ "put",
views.dataverse_publish_dataset,
json_renderer,
),
Rule(
[
- '/project//dataverse/widget/contents/',
- '/project//node//dataverse/widget/contents/',
+ "/project//dataverse/widget/contents/",
+ "/project//node//dataverse/widget/contents/",
],
- 'get',
+ "get",
views.dataverse_get_widget_contents,
json_renderer,
),
],
- 'prefix': '/api/v1'
+ "prefix": "/api/v1",
}
diff --git a/addons/dataverse/serializer.py b/addons/dataverse/serializer.py
index 0b8d66abceb..0b7ba02e510 100644
--- a/addons/dataverse/serializer.py
+++ b/addons/dataverse/serializer.py
@@ -5,8 +5,7 @@
class DataverseSerializer(OAuthAddonSerializer):
-
- addon_short_name = 'dataverse'
+ addon_short_name = "dataverse"
REQUIRED_URLS = []
@@ -14,10 +13,12 @@ class DataverseSerializer(OAuthAddonSerializer):
def serialize_account(self, external_account):
ret = super().serialize_account(external_account)
host = external_account.oauth_key
- ret.update({
- 'host': host,
- 'host_url': f'https://{host}',
- })
+ ret.update(
+ {
+ "host": host,
+ "host_url": f"https://{host}",
+ }
+ )
return ret
@@ -29,16 +30,18 @@ def credentials_owner(self):
def serialized_urls(self):
external_account = self.node_settings.external_account
ret = {
- 'settings': web_url_for('user_addons'), # TODO: Is this needed?
+ "settings": web_url_for("user_addons"), # TODO: Is this needed?
}
# Dataverse users do not currently have profile URLs
if external_account and external_account.profile_url:
- ret['owner'] = external_account.profile_url
+ ret["owner"] = external_account.profile_url
addon_urls = self.addon_serialized_urls
# Make sure developer returns set of needed urls
for url in self.REQUIRED_URLS:
- assert url in addon_urls, f"addon_serilized_urls must include key '{url}'"
+ assert (
+ url in addon_urls
+ ), f"addon_serilized_urls must include key '{url}'"
ret.update(addon_urls)
return ret
@@ -46,23 +49,23 @@ def serialized_urls(self):
def addon_serialized_urls(self):
node = self.node_settings.owner
external_account = self.node_settings.external_account
- host = external_account.oauth_key if external_account else ''
+ host = external_account.oauth_key if external_account else ""
return {
- 'create': api_url_for('dataverse_add_user_account'),
- 'set': node.api_url_for('dataverse_set_config'),
- 'importAuth': node.api_url_for('dataverse_import_auth'),
- 'deauthorize': node.api_url_for('dataverse_deauthorize_node'),
- 'getDatasets': node.api_url_for('dataverse_get_datasets'),
- 'datasetPrefix': 'https://doi.org/',
- 'dataversePrefix': f'http://{host}/dataverse/',
- 'accounts': api_url_for('dataverse_account_list'),
+ "create": api_url_for("dataverse_add_user_account"),
+ "set": node.api_url_for("dataverse_set_config"),
+ "importAuth": node.api_url_for("dataverse_import_auth"),
+ "deauthorize": node.api_url_for("dataverse_deauthorize_node"),
+ "getDatasets": node.api_url_for("dataverse_get_datasets"),
+ "datasetPrefix": "https://doi.org/",
+ "dataversePrefix": f"http://{host}/dataverse/",
+ "accounts": api_url_for("dataverse_account_list"),
}
@property
def serialized_node_settings(self):
result = super().serialized_node_settings
- result['hosts'] = DEFAULT_HOSTS
+ result["hosts"] = DEFAULT_HOSTS
# Update with Dataverse specific fields
if self.node_settings.has_auth:
@@ -71,22 +74,24 @@ def serialized_node_settings(self):
connection = client.connect_from_settings(self.node_settings)
dataverses = client.get_dataverses(connection)
- result.update({
- 'dataverseHost': dataverse_host,
- 'connected': connection is not None,
- 'dataverses': [
- {'title': dataverse.title, 'alias': dataverse.alias}
- for dataverse in dataverses
- ],
- 'savedDataverse': {
- 'title': self.node_settings.dataverse,
- 'alias': self.node_settings.dataverse_alias,
- },
- 'savedDataset': {
- 'title': self.node_settings.dataset,
- 'doi': self.node_settings.dataset_doi,
+ result.update(
+ {
+ "dataverseHost": dataverse_host,
+ "connected": connection is not None,
+ "dataverses": [
+ {"title": dataverse.title, "alias": dataverse.alias}
+ for dataverse in dataverses
+ ],
+ "savedDataverse": {
+ "title": self.node_settings.dataverse,
+ "alias": self.node_settings.dataverse_alias,
+ },
+ "savedDataset": {
+ "title": self.node_settings.dataset,
+ "doi": self.node_settings.dataset_doi,
+ },
}
- })
+ )
return result
diff --git a/addons/dataverse/settings/__init__.py b/addons/dataverse/settings/__init__.py
index eb5d40c3725..8ef6b3f33e5 100644
--- a/addons/dataverse/settings/__init__.py
+++ b/addons/dataverse/settings/__init__.py
@@ -6,4 +6,4 @@
try:
from .local import * # noqa
except ImportError:
- logger.warning('No local.py settings file found')
+ logger.warning("No local.py settings file found")
diff --git a/addons/dataverse/settings/defaults.py b/addons/dataverse/settings/defaults.py
index e01358309c8..81525340758 100644
--- a/addons/dataverse/settings/defaults.py
+++ b/addons/dataverse/settings/defaults.py
@@ -1,6 +1,6 @@
DEFAULT_HOSTS = [
- 'dataverse.harvard.edu', # Harvard PRODUCTION server
- 'dataverse.lib.virginia.edu' # University of Virginia server
+ "dataverse.harvard.edu", # Harvard PRODUCTION server
+ "dataverse.lib.virginia.edu", # University of Virginia server
]
REQUEST_TIMEOUT = 60
diff --git a/addons/dataverse/settings/local-dist.py b/addons/dataverse/settings/local-dist.py
index 8b6d3f8b91c..20251344efa 100644
--- a/addons/dataverse/settings/local-dist.py
+++ b/addons/dataverse/settings/local-dist.py
@@ -1,4 +1,4 @@
DEFAULT_HOSTS = [
- 'dataverse.harvard.edu', # Harvard PRODUCTION server
- 'dataverse.lib.virginia.edu' # University of Virginia server
+ "dataverse.harvard.edu", # Harvard PRODUCTION server
+ "dataverse.lib.virginia.edu", # University of Virginia server
]
diff --git a/addons/dataverse/settings/local-travis.py b/addons/dataverse/settings/local-travis.py
index 8b6d3f8b91c..20251344efa 100644
--- a/addons/dataverse/settings/local-travis.py
+++ b/addons/dataverse/settings/local-travis.py
@@ -1,4 +1,4 @@
DEFAULT_HOSTS = [
- 'dataverse.harvard.edu', # Harvard PRODUCTION server
- 'dataverse.lib.virginia.edu' # University of Virginia server
+ "dataverse.harvard.edu", # Harvard PRODUCTION server
+ "dataverse.lib.virginia.edu", # University of Virginia server
]
diff --git a/addons/dataverse/tests/factories.py b/addons/dataverse/tests/factories.py
index 6a67eb8fd5c..e8ee3e09888 100644
--- a/addons/dataverse/tests/factories.py
+++ b/addons/dataverse/tests/factories.py
@@ -1,18 +1,24 @@
"""Factory boy factories for the Dataverse addon."""
+
import factory
from factory.django import DjangoModelFactory
-from osf_tests.factories import UserFactory, ProjectFactory, ExternalAccountFactory
+from osf_tests.factories import (
+ UserFactory,
+ ProjectFactory,
+ ExternalAccountFactory,
+)
from addons.dataverse.models import UserSettings, NodeSettings
+
class DataverseAccountFactory(ExternalAccountFactory):
- provider = 'dataverse'
- provider_name = 'Dataverse'
+ provider = "dataverse"
+ provider_name = "Dataverse"
- provider_id = factory.Sequence(lambda n: f'id-{n}')
- oauth_key = factory.Sequence(lambda n: f'key-{n}')
- display_name = 'foo.bar.baz'
- oauth_secret = 'doremi-abc-123'
+ provider_id = factory.Sequence(lambda n: f"id-{n}")
+ oauth_key = factory.Sequence(lambda n: f"key-{n}")
+ display_name = "foo.bar.baz"
+ oauth_secret = "doremi-abc-123"
class DataverseUserSettingsFactory(DjangoModelFactory):
diff --git a/addons/dataverse/tests/test_client.py b/addons/dataverse/tests/test_client.py
index 19812d1c8b3..3e2777c0d5c 100644
--- a/addons/dataverse/tests/test_client.py
+++ b/addons/dataverse/tests/test_client.py
@@ -6,11 +6,21 @@
from dataverse.exceptions import UnauthorizedError
from addons.dataverse.models import NodeSettings
-from addons.dataverse.tests.utils import DataverseAddonTestCase, create_external_account
+from addons.dataverse.tests.utils import (
+ DataverseAddonTestCase,
+ create_external_account,
+)
from framework.exceptions import HTTPError
from addons.dataverse.client import (
- _connect, get_files, publish_dataset, get_datasets, get_dataset,
- get_dataverses, get_dataverse, connect_from_settings, connect_or_error,
+ _connect,
+ get_files,
+ publish_dataset,
+ get_datasets,
+ get_dataset,
+ get_dataverses,
+ get_dataverse,
+ connect_from_settings,
+ connect_or_error,
connect_from_settings_or_401,
)
from addons.dataverse import settings
@@ -19,12 +29,11 @@
class TestClient(DataverseAddonTestCase, unittest.TestCase):
-
def setUp(self):
super().setUp()
- self.host = 'some.host.url'
- self.token = 'some-fancy-api-token-which-is-long'
+ self.host = "some.host.url"
+ self.token = "some-fancy-api-token-which-is-long"
self.mock_connection = mock.create_autospec(Connection)
self.mock_dataverse = mock.create_autospec(Dataverse)
@@ -35,7 +44,7 @@ def setUp(self):
self.mock_dataset.dataverse = self.mock_dataverse
self.mock_dataverse.connection = self.mock_connection
- @mock.patch('addons.dataverse.client.Connection')
+ @mock.patch("addons.dataverse.client.Connection")
def test_connect(self, mock_connection):
mock_connection.return_value = mock.create_autospec(Connection)
c = _connect(self.host, self.token)
@@ -43,7 +52,7 @@ def test_connect(self, mock_connection):
mock_connection.assert_called_once_with(self.host, self.token)
assert c
- @mock.patch('addons.dataverse.client.Connection')
+ @mock.patch("addons.dataverse.client.Connection")
def test_connect_fail(self, mock_connection):
mock_connection.side_effect = UnauthorizedError()
with pytest.raises(UnauthorizedError):
@@ -51,7 +60,7 @@ def test_connect_fail(self, mock_connection):
mock_connection.assert_called_once_with(self.host, self.token)
- @mock.patch('addons.dataverse.client.Connection')
+ @mock.patch("addons.dataverse.client.Connection")
def test_connect_or_error(self, mock_connection):
mock_connection.return_value = mock.create_autospec(Connection)
c = connect_or_error(self.host, self.token)
@@ -59,8 +68,10 @@ def test_connect_or_error(self, mock_connection):
mock_connection.assert_called_once_with(self.host, self.token)
assert c
- @mock.patch('addons.dataverse.client.Connection')
- def test_connect_or_error_returns_401_when_client_raises_unauthorized_error(self, mock_connection):
+ @mock.patch("addons.dataverse.client.Connection")
+ def test_connect_or_error_returns_401_when_client_raises_unauthorized_error(
+ self, mock_connection
+ ):
mock_connection.side_effect = UnauthorizedError()
with pytest.raises(HTTPError) as cm:
connect_or_error(self.host, self.token)
@@ -68,11 +79,12 @@ def test_connect_or_error_returns_401_when_client_raises_unauthorized_error(self
mock_connection.assert_called_once_with(self.host, self.token)
assert cm.value.code == 401
- @mock.patch('addons.dataverse.client._connect')
+ @mock.patch("addons.dataverse.client._connect")
def test_connect_from_settings(self, mock_connect):
node_settings = NodeSettings()
node_settings.external_account = create_external_account(
- self.host, self.token,
+ self.host,
+ self.token,
)
connection = connect_from_settings(node_settings)
@@ -83,11 +95,12 @@ def test_connect_from_settings_none(self):
connection = connect_from_settings(None)
assert connection is None
- @mock.patch('addons.dataverse.client._connect')
+ @mock.patch("addons.dataverse.client._connect")
def test_connect_from_settings_or_401(self, mock_connect):
node_settings = NodeSettings()
node_settings.external_account = create_external_account(
- self.host, self.token,
+ self.host,
+ self.token,
)
connection = connect_from_settings_or_401(node_settings)
@@ -98,12 +111,13 @@ def test_connect_from_settings_or_401_none(self):
connection = connect_from_settings_or_401(None)
assert connection is None
- @mock.patch('addons.dataverse.client.Connection')
+ @mock.patch("addons.dataverse.client.Connection")
def test_connect_from_settings_or_401_forbidden(self, mock_connection):
mock_connection.side_effect = UnauthorizedError()
node_settings = NodeSettings()
node_settings.external_account = create_external_account(
- self.host, self.token,
+ self.host,
+ self.token,
)
with pytest.raises(HTTPError) as e:
@@ -115,19 +129,21 @@ def test_connect_from_settings_or_401_forbidden(self, mock_connection):
def test_get_files(self):
published = False
get_files(self.mock_dataset, published)
- self.mock_dataset.get_files.assert_called_once_with('latest')
+ self.mock_dataset.get_files.assert_called_once_with("latest")
def test_get_files_published(self):
published = True
get_files(self.mock_dataset, published)
- self.mock_dataset.get_files.assert_called_once_with('latest-published')
+ self.mock_dataset.get_files.assert_called_once_with("latest-published")
def test_publish_dataset(self):
publish_dataset(self.mock_dataset)
self.mock_dataset.publish.assert_called_once_with()
def test_publish_dataset_unpublished_dataverse(self):
- type(self.mock_dataverse).is_published = mock.PropertyMock(return_value=False)
+ type(self.mock_dataverse).is_published = mock.PropertyMock(
+ return_value=False
+ )
with pytest.raises(HTTPError) as e:
publish_dataset(self.mock_dataset)
@@ -138,15 +154,22 @@ def test_get_datasets(self):
mock_dataset1 = mock.create_autospec(Dataset)
mock_dataset2 = mock.create_autospec(Dataset)
mock_dataset3 = mock.create_autospec(Dataset)
- mock_dataset1.get_state.return_value = 'DRAFT'
- mock_dataset2.get_state.return_value = 'RELEASED'
- mock_dataset3.get_state.return_value = 'DEACCESSIONED'
+ mock_dataset1.get_state.return_value = "DRAFT"
+ mock_dataset2.get_state.return_value = "RELEASED"
+ mock_dataset3.get_state.return_value = "DEACCESSIONED"
self.mock_dataverse.get_datasets.return_value = [
- mock_dataset1, mock_dataset2, mock_dataset3
+ mock_dataset1,
+ mock_dataset2,
+ mock_dataset3,
]
datasets = get_datasets(self.mock_dataverse)
- assert self.mock_dataverse.get_datasets.assert_called_once_with(timeout=settings.REQUEST_TIMEOUT) is None
+ assert (
+ self.mock_dataverse.get_datasets.assert_called_once_with(
+ timeout=settings.REQUEST_TIMEOUT
+ )
+ is None
+ )
assert mock_dataset1 in datasets
assert mock_dataset2 in datasets
assert mock_dataset3 in datasets
@@ -156,55 +179,78 @@ def test_get_datasets_no_dataverse(self):
assert datasets == []
def test_get_dataset(self):
- self.mock_dataset.get_state.return_value = 'DRAFT'
+ self.mock_dataset.get_state.return_value = "DRAFT"
self.mock_dataverse.get_dataset_by_doi.return_value = self.mock_dataset
- s = get_dataset(self.mock_dataverse, 'My hdl')
- assert self.mock_dataverse.get_dataset_by_doi.assert_called_once_with('My hdl', timeout=settings.REQUEST_TIMEOUT) is None
+ s = get_dataset(self.mock_dataverse, "My hdl")
+ assert (
+ self.mock_dataverse.get_dataset_by_doi.assert_called_once_with(
+ "My hdl", timeout=settings.REQUEST_TIMEOUT
+ )
+ is None
+ )
assert s == self.mock_dataset
- @mock.patch('dataverse.dataverse.requests')
+ @mock.patch("dataverse.dataverse.requests")
def test_get_dataset_calls_patched_timeout_method(self, mock_requests):
# Verify optional timeout parameter is passed to requests by dataverse client.
# https://github.com/IQSS/dataverse-client-python/pull/27
dataverse = Dataverse(mock.Mock(), mock.Mock())
- dataverse.connection.auth = 'me'
- dataverse.collection.get.return_value = '123'
- mock_requests.get.side_effect = Exception('Done Testing')
+ dataverse.connection.auth = "me"
+ dataverse.collection.get.return_value = "123"
+ mock_requests.get.side_effect = Exception("Done Testing")
with pytest.raises(Exception) as e:
- get_dataset(dataverse, 'My hdl')
- assert mock_requests.get.assert_called_once_with('123', auth='me', timeout=settings.REQUEST_TIMEOUT) is None
- assert str(e.value) == 'Done Testing'
+ get_dataset(dataverse, "My hdl")
+ assert (
+ mock_requests.get.assert_called_once_with(
+ "123", auth="me", timeout=settings.REQUEST_TIMEOUT
+ )
+ is None
+ )
+ assert str(e.value) == "Done Testing"
def test_get_deaccessioned_dataset(self):
- self.mock_dataset.get_state.return_value = 'DEACCESSIONED'
+ self.mock_dataset.get_state.return_value = "DEACCESSIONED"
self.mock_dataverse.get_dataset_by_doi.return_value = self.mock_dataset
with pytest.raises(HTTPError) as e:
- get_dataset(self.mock_dataverse, 'My hdl')
+ get_dataset(self.mock_dataverse, "My hdl")
- assert self.mock_dataverse.get_dataset_by_doi.assert_called_once_with('My hdl', timeout=settings.REQUEST_TIMEOUT) is None
+ assert (
+ self.mock_dataverse.get_dataset_by_doi.assert_called_once_with(
+ "My hdl", timeout=settings.REQUEST_TIMEOUT
+ )
+ is None
+ )
assert e.value.code == 410
def test_get_bad_dataset(self):
- error = UnicodeDecodeError('utf-8', b'', 1, 2, 'jeepers')
+ error = UnicodeDecodeError("utf-8", b"", 1, 2, "jeepers")
self.mock_dataset.get_state.side_effect = error
self.mock_dataverse.get_dataset_by_doi.return_value = self.mock_dataset
with pytest.raises(HTTPError) as e:
- get_dataset(self.mock_dataverse, 'My hdl')
- assert self.mock_dataverse.get_dataset_by_doi.assert_called_once_with('My hdl', timeout=settings.REQUEST_TIMEOUT) is None
+ get_dataset(self.mock_dataverse, "My hdl")
+ assert (
+ self.mock_dataverse.get_dataset_by_doi.assert_called_once_with(
+ "My hdl", timeout=settings.REQUEST_TIMEOUT
+ )
+ is None
+ )
assert e.value.code == 406
def test_get_dataverses(self):
published_dv = mock.create_autospec(Dataverse)
unpublished_dv = mock.create_autospec(Dataverse)
type(published_dv).is_published = mock.PropertyMock(return_value=True)
- type(unpublished_dv).is_published = mock.PropertyMock(return_value=False)
+ type(unpublished_dv).is_published = mock.PropertyMock(
+ return_value=False
+ )
self.mock_connection.get_dataverses.return_value = [
- published_dv, unpublished_dv
+ published_dv,
+ unpublished_dv,
]
dvs = get_dataverses(self.mock_connection)
@@ -215,19 +261,23 @@ def test_get_dataverses(self):
assert len(dvs) == 2
def test_get_dataverse(self):
- type(self.mock_dataverse).is_published = mock.PropertyMock(return_value=True)
+ type(self.mock_dataverse).is_published = mock.PropertyMock(
+ return_value=True
+ )
self.mock_connection.get_dataverse.return_value = self.mock_dataverse
- d = get_dataverse(self.mock_connection, 'ALIAS')
- self.mock_connection.get_dataverse.assert_called_once_with('ALIAS')
+ d = get_dataverse(self.mock_connection, "ALIAS")
+ self.mock_connection.get_dataverse.assert_called_once_with("ALIAS")
assert d == self.mock_dataverse
def test_get_unpublished_dataverse(self):
- type(self.mock_dataverse).is_published = mock.PropertyMock(return_value=False)
+ type(self.mock_dataverse).is_published = mock.PropertyMock(
+ return_value=False
+ )
self.mock_connection.get_dataverse.return_value = self.mock_dataverse
- d = get_dataverse(self.mock_connection, 'ALIAS')
- self.mock_connection.get_dataverse.assert_called_once_with('ALIAS')
+ d = get_dataverse(self.mock_connection, "ALIAS")
+ self.mock_connection.get_dataverse.assert_called_once_with("ALIAS")
assert d == self.mock_dataverse
diff --git a/addons/dataverse/tests/test_logger.py b/addons/dataverse/tests/test_logger.py
index 6ff9670e41e..989f919e8a6 100644
--- a/addons/dataverse/tests/test_logger.py
+++ b/addons/dataverse/tests/test_logger.py
@@ -1,4 +1,5 @@
"""NodeLogger tests for the Dataverse addon."""
+
import pytest
from addons.base.tests.logger import StorageAddonNodeLoggerTestSuiteMixin
@@ -8,16 +9,18 @@
pytestmark = pytest.mark.django_db
-class TestDataverseNodeLogger(StorageAddonNodeLoggerTestSuiteMixin, OsfTestCase):
- addon_short_name = 'dataverse'
+class TestDataverseNodeLogger(
+ StorageAddonNodeLoggerTestSuiteMixin, OsfTestCase
+):
+ addon_short_name = "dataverse"
NodeLogger = DataverseNodeLogger
def setUp(self):
super().setUp()
node_settings = self.node.get_addon(self.addon_short_name)
- node_settings.dataset = 'fake dataset'
+ node_settings.dataset = "fake dataset"
node_settings.save()
def tearDown(self):
diff --git a/addons/dataverse/tests/test_model.py b/addons/dataverse/tests/test_model.py
index 6054ea40dac..97ef6d52daf 100644
--- a/addons/dataverse/tests/test_model.py
+++ b/addons/dataverse/tests/test_model.py
@@ -5,13 +5,16 @@
from tests.base import get_default_metaschema
from framework.auth.decorators import Auth
-from addons.base.tests.models import (OAuthAddonNodeSettingsTestSuiteMixin,
- OAuthAddonUserSettingTestSuiteMixin)
+from addons.base.tests.models import (
+ OAuthAddonNodeSettingsTestSuiteMixin,
+ OAuthAddonUserSettingTestSuiteMixin,
+)
from addons.dataverse.models import NodeSettings
from addons.dataverse.tests.factories import (
- DataverseAccountFactory, DataverseNodeSettingsFactory,
- DataverseUserSettingsFactory
+ DataverseAccountFactory,
+ DataverseNodeSettingsFactory,
+ DataverseUserSettingsFactory,
)
from addons.dataverse.tests import utils
from osf_tests.factories import DraftRegistrationFactory
@@ -19,10 +22,13 @@
pytestmark = pytest.mark.django_db
-class TestNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, utils.DataverseAddonTestCase, unittest.TestCase):
-
- short_name = 'dataverse'
- full_name = 'Dataverse'
+class TestNodeSettings(
+ OAuthAddonNodeSettingsTestSuiteMixin,
+ utils.DataverseAddonTestCase,
+ unittest.TestCase,
+):
+ short_name = "dataverse"
+ full_name = "Dataverse"
ExternalAccountFactory = DataverseAccountFactory
NodeSettingsFactory = DataverseNodeSettingsFactory
@@ -31,38 +37,38 @@ class TestNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, utils.DataverseAddo
def _node_settings_class_kwargs(self, node, user_settings):
return {
- 'user_settings': self.user_settings,
- '_dataset_id': '1234567890',
- 'dataset_doi': '10.123/DAVATERSE',
- 'owner': self.node
+ "user_settings": self.user_settings,
+ "_dataset_id": "1234567890",
+ "dataset_doi": "10.123/DAVATERSE",
+ "owner": self.node,
}
- @mock.patch('website.archiver.tasks.archive')
+ @mock.patch("website.archiver.tasks.archive")
def test_does_not_get_copied_to_registrations(self, mock_archive):
registration = self.node.register_node(
schema=get_default_metaschema(),
auth=Auth(user=self.node.creator),
- draft_registration=DraftRegistrationFactory(branched_from=self.node),
+ draft_registration=DraftRegistrationFactory(
+ branched_from=self.node
+ ),
)
- assert not registration.has_addon('dataverse')
+ assert not registration.has_addon("dataverse")
## Overrides ##
def test_create_log(self):
- action = 'file_added'
- filename = 'pizza.nii'
+ action = "file_added"
+ filename = "pizza.nii"
nlog = self.node.logs.count()
self.node_settings.create_waterbutler_log(
auth=Auth(user=self.user),
action=action,
- metadata={'path': filename, 'materialized': filename},
+ metadata={"path": filename, "materialized": filename},
)
self.node.reload()
assert self.node.logs.count() == nlog + 1
- assert self.node.logs.latest().action == \
- f'{self.short_name}_{action}'
- assert self.node.logs.latest().params['filename'] == \
- filename
+ assert self.node.logs.latest().action == f"{self.short_name}_{action}"
+ assert self.node.logs.latest().params["filename"] == filename
def test_set_folder(self):
dataverse = utils.create_mock_dataverse()
@@ -72,28 +78,27 @@ def test_set_folder(self):
assert self.node_settings.folder_id == dataset.id
# Log was saved
last_log = self.node.logs.latest()
- assert last_log.action == f'{self.short_name}_dataset_linked'
+ assert last_log.action == f"{self.short_name}_dataset_linked"
def test_serialize_credentials(self):
credentials = self.node_settings.serialize_waterbutler_credentials()
assert self.node_settings.external_account.oauth_secret is not None
- expected = {'token': self.node_settings.external_account.oauth_secret}
+ expected = {"token": self.node_settings.external_account.oauth_secret}
assert credentials == expected
def test_serialize_settings(self):
settings = self.node_settings.serialize_waterbutler_settings()
expected = {
- 'host': self.external_account.oauth_key,
- 'doi': self.node_settings.dataset_doi,
- 'id': self.node_settings.dataset_id,
- 'name': self.node_settings.dataset,
+ "host": self.external_account.oauth_key,
+ "doi": self.node_settings.dataset_doi,
+ "id": self.node_settings.dataset_id,
+ "name": self.node_settings.dataset,
}
assert settings == expected
class TestUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
-
- short_name = 'dataverse'
- full_name = 'Dataverse'
+ short_name = "dataverse"
+ full_name = "Dataverse"
ExternalAccountFactory = DataverseAccountFactory
diff --git a/addons/dataverse/tests/test_serializer.py b/addons/dataverse/tests/test_serializer.py
index fa781d6b81f..c768d054cb4 100644
--- a/addons/dataverse/tests/test_serializer.py
+++ b/addons/dataverse/tests/test_serializer.py
@@ -12,23 +12,25 @@
pytestmark = pytest.mark.django_db
+
class TestDataverseSerializer(OAuthAddonSerializerTestSuiteMixin, OsfTestCase):
- addon_short_name = 'dataverse'
+ addon_short_name = "dataverse"
Serializer = DataverseSerializer
ExternalAccountFactory = DataverseAccountFactory
client = DataverseProvider
- required_settings = ('userIsOwner', 'nodeHasAuth', 'urls', 'userHasAuth')
- required_settings_authorized = ('ownerName', )
+ required_settings = ("userIsOwner", "nodeHasAuth", "urls", "userHasAuth")
+ required_settings_authorized = ("ownerName",)
def setUp(self):
super().setUp()
self.ser = self.Serializer(
- user_settings=self.user_settings,
- node_settings=self.node_settings
+ user_settings=self.user_settings, node_settings=self.node_settings
+ )
+ self.mock_api = mock.patch(
+ "addons.dataverse.serializer.client.connect_from_settings"
)
- self.mock_api = mock.patch('addons.dataverse.serializer.client.connect_from_settings')
self.mock_api.return_value = create_mock_connection()
self.mock_api.start()
@@ -39,14 +41,14 @@ def tearDown(self):
def test_serialize_acccount(self):
ea = self.ExternalAccountFactory()
expected = {
- 'id': ea._id,
- 'provider_id': ea.provider_id,
- 'provider_name': ea.provider_name,
- 'provider_short_name': ea.provider,
- 'display_name': ea.display_name,
- 'profile_url': ea.profile_url,
- 'nodes': [],
- 'host': ea.oauth_key,
- 'host_url': f'https://{ea.oauth_key}',
+ "id": ea._id,
+ "provider_id": ea.provider_id,
+ "provider_name": ea.provider_name,
+ "provider_short_name": ea.provider,
+ "display_name": ea.display_name,
+ "profile_url": ea.profile_url,
+ "nodes": [],
+ "host": ea.oauth_key,
+ "host_url": f"https://{ea.oauth_key}",
}
assert self.ser.serialize_account(ea) == expected
diff --git a/addons/dataverse/tests/test_utils.py b/addons/dataverse/tests/test_utils.py
index 2e230c9dffa..b8a1bce547d 100644
--- a/addons/dataverse/tests/test_utils.py
+++ b/addons/dataverse/tests/test_utils.py
@@ -14,46 +14,50 @@
class TestUtils(DataverseAddonTestCase):
-
def test_mock_connection(self):
mock_connection = create_mock_connection()
- assert mock_connection.token == 'snowman-frosty'
+ assert mock_connection.token == "snowman-frosty"
assert len(mock_connection.get_dataverses()) == 3
assert isinstance(mock_connection.get_dataverses()[0], Dataverse)
assert (
- mock_connection.get_dataverse(mock_connection.get_dataverses()[1].alias)
+ mock_connection.get_dataverse(
+ mock_connection.get_dataverses()[1].alias
+ )
== mock_connection.get_dataverses()[1]
)
def test_mock_dataverse(self):
- mock_dv = create_mock_dataverse('Example 1')
- assert mock_dv.title == 'Example 1'
+ mock_dv = create_mock_dataverse("Example 1")
+ assert mock_dv.title == "Example 1"
assert mock_dv.is_published
- assert mock_dv.alias == 'ALIAS1'
+ assert mock_dv.alias == "ALIAS1"
assert len(mock_dv.get_datasets()) == 3
assert isinstance(mock_dv.get_datasets()[0], Dataset)
- assert mock_dv.get_dataset_by_doi(mock_dv.get_datasets()[1].doi) == mock_dv.get_datasets()[1]
+ assert (
+ mock_dv.get_dataset_by_doi(mock_dv.get_datasets()[1].doi)
+ == mock_dv.get_datasets()[1]
+ )
def test_mock_dataset(self):
- dataset_id = 'DVN/23456'
- doi = f'doi:12.3456/{dataset_id}'
+ dataset_id = "DVN/23456"
+ doi = f"doi:12.3456/{dataset_id}"
mock_dataset = create_mock_dataset(dataset_id)
assert mock_dataset.doi == doi
- assert mock_dataset.citation == f'Example Citation for {dataset_id}'
- assert mock_dataset.title == f'Example ({dataset_id})'
+ assert mock_dataset.citation == f"Example Citation for {dataset_id}"
+ assert mock_dataset.title == f"Example ({dataset_id})"
assert mock_dataset.doi == doi
- assert mock_dataset.get_state() == 'DRAFT'
+ assert mock_dataset.get_state() == "DRAFT"
assert len(mock_dataset.get_files()) == 1
assert not mock_dataset.get_files()[0].is_published
assert mock_dataset.get_files(published=True)[0].is_published
- assert not mock_dataset.get_file('name.txt').is_published
- assert mock_dataset.get_file('name.txt', published=True).is_published
- assert not mock_dataset.get_file_by_id('123').is_published
- assert mock_dataset.get_file_by_id('123', published=True).is_published
+ assert not mock_dataset.get_file("name.txt").is_published
+ assert mock_dataset.get_file("name.txt", published=True).is_published
+ assert not mock_dataset.get_file_by_id("123").is_published
+ assert mock_dataset.get_file_by_id("123", published=True).is_published
def test_mock_dvn_file(self):
- fid = '65432'
+ fid = "65432"
mock_file = create_mock_draft_file(fid)
- assert mock_file.name == 'file.txt'
+ assert mock_file.name == "file.txt"
assert mock_file.id == fid
assert isinstance(mock_file, DataverseFile)
diff --git a/addons/dataverse/tests/test_views.py b/addons/dataverse/tests/test_views.py
index e6c1faabf4d..4dcf6391db1 100644
--- a/addons/dataverse/tests/test_views.py
+++ b/addons/dataverse/tests/test_views.py
@@ -8,7 +8,9 @@
from addons.base.tests.views import OAuthAddonConfigViewsTestCaseMixin
from addons.dataverse.models import DataverseProvider
from addons.dataverse.tests.utils import (
- create_mock_connection, DataverseAddonTestCase, create_external_account,
+ create_mock_connection,
+ DataverseAddonTestCase,
+ create_external_account,
)
from framework.auth.decorators import Auth
from osf_tests.factories import AuthUserFactory
@@ -18,11 +20,12 @@
pytestmark = pytest.mark.django_db
-class TestAuthViews(DataverseAddonTestCase, OsfTestCase, unittest.TestCase):
+class TestAuthViews(DataverseAddonTestCase, OsfTestCase, unittest.TestCase):
def test_deauthorize(self):
- url = api_url_for('dataverse_deauthorize_node',
- pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_deauthorize_node", pid=self.project._primary_key
+ )
self.app.delete(url, auth=self.user.auth)
self.node_settings.reload()
@@ -35,37 +38,42 @@ def test_deauthorize(self):
# Log states that node was deauthorized
self.project.reload()
last_log = self.project.logs.latest()
- assert last_log.action == 'dataverse_node_deauthorized'
+ assert last_log.action == "dataverse_node_deauthorized"
log_params = last_log.params
- assert log_params['node'] == self.project._primary_key
- assert log_params['project'] is None
+ assert log_params["node"] == self.project._primary_key
+ assert log_params["project"] is None
def test_user_config_get(self):
- url = api_url_for('dataverse_user_config_get')
+ url = api_url_for("dataverse_user_config_get")
new_user = AuthUserFactory()
res = self.app.get(url, auth=new_user.auth)
- result = res.json.get('result')
- assert not result['userHasAuth']
- assert 'hosts' in result
- assert 'create' in result['urls']
+ result = res.json.get("result")
+ assert not result["userHasAuth"]
+ assert "hosts" in result
+ assert "create" in result["urls"]
# userHasAuth is true with external accounts
new_user.external_accounts.add(create_external_account())
new_user.save()
res = self.app.get(url, auth=self.user.auth)
- result = res.json.get('result')
- assert result['userHasAuth']
+ result = res.json.get("result")
+ assert result["userHasAuth"]
-class TestConfigViews(DataverseAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
+
+class TestConfigViews(
+ DataverseAddonTestCase, OAuthAddonConfigViewsTestCaseMixin, OsfTestCase
+):
connection = create_mock_connection()
Serializer = DataverseSerializer
client = DataverseProvider
def setUp(self):
super().setUp()
- self.mock_ser_api = mock.patch('addons.dataverse.serializer.client.connect_from_settings')
+ self.mock_ser_api = mock.patch(
+ "addons.dataverse.serializer.client.connect_from_settings"
+ )
self.mock_ser_api.return_value = create_mock_connection()
self.mock_ser_api.start()
@@ -73,58 +81,74 @@ def tearDown(self):
self.mock_ser_api.stop()
super().tearDown()
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
def test_folder_list(self, mock_connection):
- #test_get_datasets
+ # test_get_datasets
mock_connection.return_value = self.connection
- url = api_url_for('dataverse_get_datasets', pid=self.project._primary_key)
- params = {'alias': 'ALIAS1'}
+ url = api_url_for(
+ "dataverse_get_datasets", pid=self.project._primary_key
+ )
+ params = {"alias": "ALIAS1"}
res = self.app.post(url, json=params, auth=self.user.auth)
- assert len(res.json['datasets']) == 3
- first = res.json['datasets'][0]
- assert first['title'] == 'Example (DVN/00001)'
- assert first['doi'] == 'doi:12.3456/DVN/00001'
+ assert len(res.json["datasets"]) == 3
+ first = res.json["datasets"][0]
+ assert first["title"] == "Example (DVN/00001)"
+ assert first["doi"] == "doi:12.3456/DVN/00001"
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
def test_set_config(self, mock_connection):
mock_connection.return_value = self.connection
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_set_config')
- res = self.app.post(url, json={
- 'dataverse': {'alias': 'ALIAS3'},
- 'dataset': {'doi': 'doi:12.3456/DVN/00003'},
- }, auth=self.user.auth)
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_set_config")
+ res = self.app.post(
+ url,
+ json={
+ "dataverse": {"alias": "ALIAS3"},
+ "dataset": {"doi": "doi:12.3456/DVN/00003"},
+ },
+ auth=self.user.auth,
+ )
assert res.status_code == http_status.HTTP_200_OK
self.project.reload()
- assert self.project.logs.latest().action == \
- f'{self.ADDON_SHORT_NAME}_dataset_linked'
- assert res.json['dataverse'] == self.connection.get_dataverse('ALIAS3').title
- assert res.json['dataset'] == \
- self.connection.get_dataverse('ALIAS3').get_dataset_by_doi('doi:12.3456/DVN/00003').title
+ assert (
+ self.project.logs.latest().action
+ == f"{self.ADDON_SHORT_NAME}_dataset_linked"
+ )
+ assert (
+ res.json["dataverse"]
+ == self.connection.get_dataverse("ALIAS3").title
+ )
+ assert (
+ res.json["dataset"]
+ == self.connection.get_dataverse("ALIAS3")
+ .get_dataset_by_doi("doi:12.3456/DVN/00003")
+ .title
+ )
def test_get_config(self):
- url = self.project.api_url_for(f'{self.ADDON_SHORT_NAME}_get_config')
+ url = self.project.api_url_for(f"{self.ADDON_SHORT_NAME}_get_config")
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_200_OK
- assert 'result' in res.json
+ assert "result" in res.json
serialized = self.Serializer().serialize_settings(
self.node_settings,
self.user,
)
- assert serialized == res.json['result']
+ assert serialized == res.json["result"]
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
def test_set_config_no_dataset(self, mock_connection):
mock_connection.return_value = self.connection
num_old_logs = self.project.logs.count()
- url = api_url_for('dataverse_set_config',
- pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_set_config", pid=self.project._primary_key
+ )
params = {
- 'dataverse': {'alias': 'ALIAS3'},
- 'dataset': {}, # The dataverse has no datasets
+ "dataverse": {"alias": "ALIAS3"},
+ "dataset": {}, # The dataverse has no datasets
}
# Select a different dataset
@@ -133,9 +157,9 @@ def test_set_config_no_dataset(self, mock_connection):
# Old settings did not change
assert res.status_code == http_status.HTTP_400_BAD_REQUEST
- assert self.node_settings.dataverse_alias == 'ALIAS2'
- assert self.node_settings.dataset == 'Example (DVN/00001)'
- assert self.node_settings.dataset_doi == 'doi:12.3456/DVN/00001'
+ assert self.node_settings.dataverse_alias == "ALIAS2"
+ assert self.node_settings.dataset == "Example (DVN/00001)"
+ assert self.node_settings.dataset_doi == "doi:12.3456/DVN/00001"
# Nothing was logged
self.project.reload()
@@ -143,16 +167,17 @@ def test_set_config_no_dataset(self, mock_connection):
class TestHgridViews(DataverseAddonTestCase, OsfTestCase, unittest.TestCase):
-
- @mock.patch('addons.dataverse.views.client.get_custom_publish_text')
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
- @mock.patch('addons.dataverse.views.client.get_files')
- def test_dataverse_root_published(self, mock_files, mock_connection, mock_text):
+ @mock.patch("addons.dataverse.views.client.get_custom_publish_text")
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
+ @mock.patch("addons.dataverse.views.client.get_files")
+ def test_dataverse_root_published(
+ self, mock_files, mock_connection, mock_text
+ ):
mock_connection.return_value = create_mock_connection()
- mock_files.return_value = ['mock_file']
- mock_text.return_value = 'Do you want to publish?'
+ mock_files.return_value = ["mock_file"]
+ mock_text.return_value = "Do you want to publish?"
- self.project.set_privacy('public')
+ self.project.set_privacy("public")
self.project.save()
alias = self.node_settings.dataverse_alias
@@ -165,31 +190,34 @@ def test_dataverse_root_published(self, mock_files, mock_connection, mock_text):
self.node_settings.dataset_doi = doi
self.node_settings.save()
- url = api_url_for('dataverse_root_folder',
- pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_root_folder", pid=self.project._primary_key
+ )
# Contributor can select between states, current state is correct
res = self.app.get(url, auth=self.user.auth)
- assert res.json[0]['permissions']['edit']
- assert res.json[0]['hasPublishedFiles']
- assert res.json[0]['version'] == 'latest-published'
+ assert res.json[0]["permissions"]["edit"]
+ assert res.json[0]["hasPublishedFiles"]
+ assert res.json[0]["version"] == "latest-published"
# Non-contributor gets published version, no options
user2 = AuthUserFactory()
res = self.app.get(url, auth=user2.auth)
- assert not res.json[0]['permissions']['edit']
- assert res.json[0]['hasPublishedFiles']
- assert res.json[0]['version'] == 'latest-published'
-
- @mock.patch('addons.dataverse.views.client.get_custom_publish_text')
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
- @mock.patch('addons.dataverse.views.client.get_files')
- def test_dataverse_root_not_published(self, mock_files, mock_connection, mock_text):
+ assert not res.json[0]["permissions"]["edit"]
+ assert res.json[0]["hasPublishedFiles"]
+ assert res.json[0]["version"] == "latest-published"
+
+ @mock.patch("addons.dataverse.views.client.get_custom_publish_text")
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
+ @mock.patch("addons.dataverse.views.client.get_files")
+ def test_dataverse_root_not_published(
+ self, mock_files, mock_connection, mock_text
+ ):
mock_connection.return_value = create_mock_connection()
mock_files.return_value = []
- mock_text.return_value = 'Do you want to publish?'
+ mock_text.return_value = "Do you want to publish?"
- self.project.set_privacy('public')
+ self.project.set_privacy("public")
self.project.save()
alias = self.node_settings.dataverse_alias
@@ -202,28 +230,30 @@ def test_dataverse_root_not_published(self, mock_files, mock_connection, mock_te
self.node_settings.dataset_doi = doi
self.node_settings.save()
- url = api_url_for('dataverse_root_folder',
- pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_root_folder", pid=self.project._primary_key
+ )
# Contributor gets draft, no options
res = self.app.get(url, auth=self.user.auth)
- assert res.json[0]['permissions']['edit']
- assert not res.json[0]['hasPublishedFiles']
- assert res.json[0]['version'] == 'latest'
+ assert res.json[0]["permissions"]["edit"]
+ assert not res.json[0]["hasPublishedFiles"]
+ assert res.json[0]["version"] == "latest"
# Non-contributor gets nothing
user2 = AuthUserFactory()
res = self.app.get(url, auth=user2.auth)
assert res.json == []
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
- @mock.patch('addons.dataverse.views.client.get_files')
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
+ @mock.patch("addons.dataverse.views.client.get_files")
def test_dataverse_root_no_connection(self, mock_files, mock_connection):
mock_connection.return_value = create_mock_connection()
- mock_files.return_value = ['mock_file']
+ mock_files.return_value = ["mock_file"]
- url = api_url_for('dataverse_root_folder',
- pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_root_folder", pid=self.project._primary_key
+ )
mock_connection.return_value = None
res = self.app.get(url, auth=self.user.auth)
@@ -233,38 +263,44 @@ def test_dataverse_root_incomplete(self):
self.node_settings.dataset_doi = None
self.node_settings.save()
- url = api_url_for('dataverse_root_folder',
- pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_root_folder", pid=self.project._primary_key
+ )
res = self.app.get(url, auth=self.user.auth)
assert res.json == []
class TestCrudViews(DataverseAddonTestCase, OsfTestCase, unittest.TestCase):
-
- @mock.patch('addons.dataverse.views.client.connect_from_settings_or_401')
- @mock.patch('addons.dataverse.views.client.publish_dataset')
- @mock.patch('addons.dataverse.views.client.publish_dataverse')
- def test_dataverse_publish_dataset(self, mock_publish_dv, mock_publish_ds, mock_connection):
+ @mock.patch("addons.dataverse.views.client.connect_from_settings_or_401")
+ @mock.patch("addons.dataverse.views.client.publish_dataset")
+ @mock.patch("addons.dataverse.views.client.publish_dataverse")
+ def test_dataverse_publish_dataset(
+ self, mock_publish_dv, mock_publish_ds, mock_connection
+ ):
mock_connection.return_value = create_mock_connection()
- url = api_url_for('dataverse_publish_dataset',
- pid=self.project._primary_key)
- self.app.put(url, json={'publish_both': False}, auth=self.user.auth)
+ url = api_url_for(
+ "dataverse_publish_dataset", pid=self.project._primary_key
+ )
+ self.app.put(url, json={"publish_both": False}, auth=self.user.auth)
# Only dataset was published
assert not mock_publish_dv.called
assert mock_publish_ds.called
- @mock.patch('addons.dataverse.views.client.connect_from_settings_or_401')
- @mock.patch('addons.dataverse.views.client.publish_dataset')
- @mock.patch('addons.dataverse.views.client.publish_dataverse')
- def test_dataverse_publish_both(self, mock_publish_dv, mock_publish_ds, mock_connection):
+ @mock.patch("addons.dataverse.views.client.connect_from_settings_or_401")
+ @mock.patch("addons.dataverse.views.client.publish_dataset")
+ @mock.patch("addons.dataverse.views.client.publish_dataverse")
+ def test_dataverse_publish_both(
+ self, mock_publish_dv, mock_publish_ds, mock_connection
+ ):
mock_connection.return_value = create_mock_connection()
- url = api_url_for('dataverse_publish_dataset',
- pid=self.project._primary_key)
- self.app.put(url, json={'publish_both': True}, auth=self.user.auth)
+ url = api_url_for(
+ "dataverse_publish_dataset", pid=self.project._primary_key
+ )
+ self.app.put(url, json={"publish_both": True}, auth=self.user.auth)
# Both Dataverse and dataset were published
assert mock_publish_dv.called
@@ -272,9 +308,7 @@ def test_dataverse_publish_both(self, mock_publish_dv, mock_publish_ds, mock_con
class TestDataverseRestrictions(DataverseAddonTestCase, OsfTestCase):
-
def setUp(self):
-
super(DataverseAddonTestCase, self).setUp()
# Nasty contributor who will try to access content that he shouldn't
@@ -283,18 +317,20 @@ def setUp(self):
self.project.add_contributor(self.contrib, auth=Auth(self.user))
self.project.save()
- @mock.patch('addons.dataverse.views.client.connect_from_settings')
+ @mock.patch("addons.dataverse.views.client.connect_from_settings")
def test_restricted_set_dataset_not_owner(self, mock_connection):
mock_connection.return_value = create_mock_connection()
# Contributor has dataverse auth, but is not the node authorizer
- self.contrib.add_addon('dataverse')
+ self.contrib.add_addon("dataverse")
self.contrib.save()
- url = api_url_for('dataverse_set_config', pid=self.project._primary_key)
+ url = api_url_for(
+ "dataverse_set_config", pid=self.project._primary_key
+ )
params = {
- 'dataverse': {'alias': 'ALIAS1'},
- 'dataset': {'doi': 'doi:12.3456/DVN/00002'},
+ "dataverse": {"alias": "ALIAS1"},
+ "dataset": {"doi": "doi:12.3456/DVN/00002"},
}
res = self.app.post(url, json=params, auth=self.contrib.auth)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
diff --git a/addons/dataverse/tests/utils.py b/addons/dataverse/tests/utils.py
index 349fdc25a57..e1095ae1d1c 100644
--- a/addons/dataverse/tests/utils.py
+++ b/addons/dataverse/tests/utils.py
@@ -9,17 +9,17 @@
class DataverseAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
- ADDON_SHORT_NAME = 'dataverse'
+ ADDON_SHORT_NAME = "dataverse"
ExternalAccountFactory = DataverseAccountFactory
Provider = DataverseProvider
def set_node_settings(self, settings):
super().set_node_settings(settings)
- settings.dataverse_alias = 'ALIAS2'
- settings.dataverse = 'Example 2'
- settings.dataset_doi = 'doi:12.3456/DVN/00001'
- settings._dataset_id = '18'
- settings.dataset = 'Example (DVN/00001)'
+ settings.dataverse_alias = "ALIAS2"
+ settings.dataverse = "Example 2"
+ settings.dataset_doi = "doi:12.3456/DVN/00001"
+ settings._dataset_id = "18"
+ settings.dataset = "Example (DVN/00001)"
settings.external_account = self.external_account
settings.save()
@@ -28,13 +28,13 @@ def folder(self):
return self.node_settings.folder_id
-def create_external_account(host='foo.bar.baz', token='doremi-abc-123'):
+def create_external_account(host="foo.bar.baz", token="doremi-abc-123"):
"""Creates external account for Dataverse with fields populated the same
way as `dataverse_add_user_account`"""
return ExternalAccountFactory(
- provider='dataverse',
- provider_name='Dataverse',
+ provider="dataverse",
+ provider_name="Dataverse",
display_name=host,
oauth_key=host,
oauth_secret=token,
@@ -44,14 +44,14 @@ def create_external_account(host='foo.bar.baz', token='doremi-abc-123'):
)
-def create_mock_connection(token='snowman-frosty'):
+def create_mock_connection(token="snowman-frosty"):
"""
Create a mock dataverse connection.
Pass any credentials other than the default parameters and the connection
will return none.
"""
- if not token == 'snowman-frosty':
+ if not token == "snowman-frosty":
return None
mock_connection = mock.create_autospec(Connection)
@@ -59,45 +59,50 @@ def create_mock_connection(token='snowman-frosty'):
mock_connection.token = token
mock_connection.get_dataverses.return_value = [
- create_mock_dataverse('Example 1'),
- create_mock_dataverse('Example 2'),
- create_mock_dataverse('Example 3'),
+ create_mock_dataverse("Example 1"),
+ create_mock_dataverse("Example 2"),
+ create_mock_dataverse("Example 3"),
]
def _get_dataverse(alias):
- return next((
- dataverse for dataverse in mock_connection.get_dataverses()
- if alias is not None and dataverse.title[-1] == alias[-1]), None
+ return next(
+ (
+ dataverse
+ for dataverse in mock_connection.get_dataverses()
+ if alias is not None and dataverse.title[-1] == alias[-1]
+ ),
+ None,
)
- mock_connection.get_dataverse = mock.MagicMock(
- side_effect=_get_dataverse
- )
+ mock_connection.get_dataverse = mock.MagicMock(side_effect=_get_dataverse)
mock_connection.get_dataverse.return_value = create_mock_dataverse()
return mock_connection
-def create_mock_dataverse(title='Example Dataverse 0'):
-
+def create_mock_dataverse(title="Example Dataverse 0"):
mock_dataverse = mock.create_autospec(Dataverse)
type(mock_dataverse).title = mock.PropertyMock(return_value=title)
type(mock_dataverse).is_published = mock.PropertyMock(return_value=True)
type(mock_dataverse).alias = mock.PropertyMock(
- return_value=f'ALIAS{title[-1]}'
+ return_value=f"ALIAS{title[-1]}"
)
mock_dataverse.get_datasets.return_value = [
- create_mock_dataset('DVN/00001'),
- create_mock_dataset('DVN/00002'),
- create_mock_dataset('DVN/00003'),
+ create_mock_dataset("DVN/00001"),
+ create_mock_dataset("DVN/00002"),
+ create_mock_dataset("DVN/00003"),
]
def _get_dataset_by_doi(doi, timeout=None):
- return next((
- dataset for dataset in mock_dataverse.get_datasets(timeout=timeout)
- if dataset.doi == doi), None
+ return next(
+ (
+ dataset
+ for dataset in mock_dataverse.get_datasets(timeout=timeout)
+ if dataset.doi == doi
+ ),
+ None,
)
mock_dataverse.get_dataset_by_doi = mock.MagicMock(
@@ -107,59 +112,68 @@ def _get_dataset_by_doi(doi, timeout=None):
return mock_dataverse
-def create_mock_dataset(id='DVN/12345'):
+def create_mock_dataset(id="DVN/12345"):
mock_dataset = mock.create_autospec(Dataset)
- mock_dataset.citation = f'Example Citation for {id}'
- mock_dataset.title = f'Example ({id})'
- mock_dataset.doi = f'doi:12.3456/{id}'
- mock_dataset.id = '18'
- mock_dataset.get_state.return_value = 'DRAFT'
+ mock_dataset.citation = f"Example Citation for {id}"
+ mock_dataset.title = f"Example ({id})"
+ mock_dataset.doi = f"doi:12.3456/{id}"
+ mock_dataset.id = "18"
+ mock_dataset.get_state.return_value = "DRAFT"
def _create_file(name, published=False):
- return create_mock_published_file() if published else create_mock_draft_file()
+ return (
+ create_mock_published_file()
+ if published
+ else create_mock_draft_file()
+ )
def _create_files(published=False):
- return [_create_file('name.txt', published)]
+ return [_create_file("name.txt", published)]
mock_dataset.get_files = mock.MagicMock(side_effect=_create_files)
mock_dataset.get_file = mock.MagicMock(side_effect=_create_file)
mock_dataset.get_file_by_id = mock.MagicMock(side_effect=_create_file)
# Fail if not given a valid ID
- if 'DVN' in id:
+ if "DVN" in id:
return mock_dataset
-def create_mock_draft_file(id='54321'):
+
+def create_mock_draft_file(id="54321"):
mock_file = mock.create_autospec(DataverseFile)
- mock_file.name = 'file.txt'
+ mock_file.name = "file.txt"
mock_file.id = id
mock_file.is_published = False
return mock_file
-def create_mock_published_file(id='54321'):
+
+def create_mock_published_file(id="54321"):
mock_file = mock.create_autospec(DataverseFile)
- mock_file.name = 'published.txt'
+ mock_file.name = "published.txt"
mock_file.id = id
mock_file.is_published = True
return mock_file
+
mock_responses = {
- 'contents': {
- 'kind': 'item',
- 'name': 'file.txt',
- 'ext': '.txt',
- 'file_id': '54321',
- 'urls': {'download': '/project/xxxxx/dataverse/file/54321/download/',
- 'delete': '/api/v1/project/xxxxx/dataverse/file/54321/',
- 'view': '/project/xxxxx/dataverse/file/54321/'},
- 'permissions': {'edit': False, 'view': True},
- 'addon': 'dataverse',
- 'hasPublishedFiles': True,
- 'state': 'published',
+ "contents": {
+ "kind": "item",
+ "name": "file.txt",
+ "ext": ".txt",
+ "file_id": "54321",
+ "urls": {
+ "download": "/project/xxxxx/dataverse/file/54321/download/",
+ "delete": "/api/v1/project/xxxxx/dataverse/file/54321/",
+ "view": "/project/xxxxx/dataverse/file/54321/",
+ },
+ "permissions": {"edit": False, "view": True},
+ "addon": "dataverse",
+ "hasPublishedFiles": True,
+ "state": "published",
}
}
diff --git a/addons/dataverse/utils.py b/addons/dataverse/utils.py
index d0fc8da232e..bfecfb57578 100644
--- a/addons/dataverse/utils.py
+++ b/addons/dataverse/utils.py
@@ -1,24 +1,25 @@
from addons.base.logger import AddonNodeLogger
-class DataverseNodeLogger(AddonNodeLogger):
- addon_short_name = 'dataverse'
+class DataverseNodeLogger(AddonNodeLogger):
+ addon_short_name = "dataverse"
def _log_params(self):
- node_settings = self.node.get_addon('dataverse')
+ node_settings = self.node.get_addon("dataverse")
return {
- 'project': self.node.parent_id,
- 'node': self.node._primary_key,
- 'dataset': node_settings.dataset if node_settings else None
+ "project": self.node.parent_id,
+ "node": self.node._primary_key,
+ "dataset": node_settings.dataset if node_settings else None,
}
+
def serialize_dataverse_widget(node):
- node_addon = node.get_addon('dataverse')
- widget_url = node.api_url_for('dataverse_get_widget_contents')
+ node_addon = node.get_addon("dataverse")
+ widget_url = node.api_url_for("dataverse_get_widget_contents")
dataverse_widget_data = {
- 'complete': node_addon.complete,
- 'widget_url': widget_url,
+ "complete": node_addon.complete,
+ "widget_url": widget_url,
}
dataverse_widget_data.update(node_addon.config.to_json())
diff --git a/addons/dataverse/views.py b/addons/dataverse/views.py
index 9b549e6de9b..ea13385f67a 100644
--- a/addons/dataverse/views.py
+++ b/addons/dataverse/views.py
@@ -1,4 +1,5 @@
"""Views for the node settings page."""
+
from rest_framework import status as http_status
from django.utils import timezone
@@ -18,69 +19,68 @@
from osf.models import ExternalAccount
from osf.utils.permissions import WRITE
from website.project.decorators import (
- must_have_addon, must_be_addon_authorizer,
- must_have_permission, must_not_be_registration,
- must_be_contributor_or_public
+ must_have_addon,
+ must_be_addon_authorizer,
+ must_have_permission,
+ must_not_be_registration,
+ must_be_contributor_or_public,
)
from website.util import rubeus, api_url_for
-SHORT_NAME = 'dataverse'
-FULL_NAME = 'Dataverse'
+SHORT_NAME = "dataverse"
+FULL_NAME = "Dataverse"
dataverse_account_list = generic_views.account_list(
- SHORT_NAME,
- DataverseSerializer
+ SHORT_NAME, DataverseSerializer
)
dataverse_import_auth = generic_views.import_auth(
- SHORT_NAME,
- DataverseSerializer
+ SHORT_NAME, DataverseSerializer
)
-dataverse_deauthorize_node = generic_views.deauthorize_node(
- SHORT_NAME
-)
+dataverse_deauthorize_node = generic_views.deauthorize_node(SHORT_NAME)
dataverse_get_config = generic_views.get_config(
- SHORT_NAME,
- DataverseSerializer
+ SHORT_NAME, DataverseSerializer
)
## Auth ##
+
@must_be_logged_in
def dataverse_user_config_get(auth, **kwargs):
"""View for getting a JSON representation of the logged-in user's
Dataverse user settings.
"""
- user_addon = auth.user.get_addon('dataverse')
+ user_addon = auth.user.get_addon("dataverse")
user_has_auth = False
if user_addon:
user_has_auth = user_addon.has_auth
return {
- 'result': {
- 'userHasAuth': user_has_auth,
- 'urls': {
- 'create': api_url_for('dataverse_add_user_account'),
- 'accounts': api_url_for('dataverse_account_list'),
+ "result": {
+ "userHasAuth": user_has_auth,
+ "urls": {
+ "create": api_url_for("dataverse_add_user_account"),
+ "accounts": api_url_for("dataverse_account_list"),
},
- 'hosts': DEFAULT_HOSTS,
+ "hosts": DEFAULT_HOSTS,
},
}, http_status.HTTP_200_OK
## Config ##
+
@must_be_logged_in
def dataverse_add_user_account(auth, **kwargs):
"""Verifies new external account credentials and adds to user's list"""
user = auth.user
provider = DataverseProvider()
- host = request.json.get('host').rstrip('/')
- api_token = request.json.get('api_token')
+ host = request.json.get("host").rstrip("/")
+ api_token = request.json.get("api_token")
# Verify that credentials are valid
client.connect_or_error(host, api_token)
@@ -90,36 +90,36 @@ def dataverse_add_user_account(auth, **kwargs):
provider.account = ExternalAccount(
provider=provider.short_name,
provider_name=provider.name,
- display_name=host, # no username; show host
- oauth_key=host, # hijacked; now host
+ display_name=host, # no username; show host
+ oauth_key=host, # hijacked; now host
oauth_secret=api_token, # hijacked; now api_token
- provider_id=api_token, # Change to username if Dataverse allows
+ provider_id=api_token, # Change to username if Dataverse allows
)
provider.account.save()
except ValidationError:
# ... or get the old one
provider.account = ExternalAccount.objects.get(
- provider=provider.short_name,
- provider_id=api_token
+ provider=provider.short_name, provider_id=api_token
)
if not user.external_accounts.filter(id=provider.account.id).exists():
user.external_accounts.add(provider.account)
- user_addon = auth.user.get_addon('dataverse')
+ user_addon = auth.user.get_addon("dataverse")
if not user_addon:
- user.add_addon('dataverse')
+ user.add_addon("dataverse")
user.save()
# Need to ensure that the user has dataverse enabled at this point
- user.get_or_add_addon('dataverse', auth=auth)
+ user.get_or_add_addon("dataverse", auth=auth)
user.save()
return {}
+
@must_have_permission(WRITE)
-@must_have_addon(SHORT_NAME, 'user')
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "user")
+@must_have_addon(SHORT_NAME, "node")
@must_be_addon_authorizer(SHORT_NAME)
def dataverse_set_config(node_addon, auth, **kwargs):
"""Saves selected Dataverse and dataset to node settings"""
@@ -130,8 +130,8 @@ def dataverse_set_config(node_addon, auth, **kwargs):
if user_settings and user_settings.owner != user:
raise HTTPError(http_status.HTTP_403_FORBIDDEN)
- alias = request.json.get('dataverse', {}).get('alias')
- doi = request.json.get('dataset', {}).get('doi')
+ alias = request.json.get("dataverse", {}).get("alias")
+ doi = request.json.get("dataset", {}).get("doi")
if doi is None or alias is None:
return HTTPError(http_status.HTTP_400_BAD_REQUEST)
@@ -142,35 +142,42 @@ def dataverse_set_config(node_addon, auth, **kwargs):
node_addon.set_folder(dataverse, dataset, auth)
- return {'dataverse': dataverse.title, 'dataset': dataset.title}, http_status.HTTP_200_OK
+ return {
+ "dataverse": dataverse.title,
+ "dataset": dataset.title,
+ }, http_status.HTTP_200_OK
@must_have_permission(WRITE)
-@must_have_addon(SHORT_NAME, 'user')
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "user")
+@must_have_addon(SHORT_NAME, "node")
def dataverse_get_datasets(node_addon, **kwargs):
"""Get list of datasets from provided Dataverse alias"""
- alias = request.json.get('alias')
+ alias = request.json.get("alias")
connection = client.connect_from_settings(node_addon)
dataverse = client.get_dataverse(connection, alias)
datasets = client.get_datasets(dataverse)
ret = {
- 'alias': alias, # include alias to verify dataset container
- 'datasets': [{'title': dataset.title, 'doi': dataset.doi} for dataset in datasets],
+ "alias": alias, # include alias to verify dataset container
+ "datasets": [
+ {"title": dataset.title, "doi": dataset.doi}
+ for dataset in datasets
+ ],
}
return ret, http_status.HTTP_200_OK
+
## Crud ##
@must_have_permission(WRITE)
@must_not_be_registration
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "node")
@must_be_addon_authorizer(SHORT_NAME)
def dataverse_publish_dataset(node_addon, auth, **kwargs):
node = node_addon.owner
- publish_both = request.json.get('publish_both', False)
+ publish_both = request.json.get("publish_both", False)
now = timezone.now()
@@ -185,25 +192,29 @@ def dataverse_publish_dataset(node_addon, auth, **kwargs):
# Add a log
node.add_log(
- action='dataverse_dataset_published',
+ action="dataverse_dataset_published",
params={
- 'project': node.parent_id,
- 'node': node._id,
- 'dataset': dataset.title,
+ "project": node.parent_id,
+ "node": node._id,
+ "dataset": dataset.title,
},
auth=auth,
log_date=now,
)
- return {'dataset': dataset.title}, http_status.HTTP_200_OK
+ return {"dataset": dataset.title}, http_status.HTTP_200_OK
+
## HGRID ##
+
def _dataverse_root_folder(node_addon, auth, **kwargs):
node = node_addon.owner
- default_version = 'latest-published'
- version = 'latest-published' if not node.can_edit(auth) else default_version
+ default_version = "latest-published"
+ version = (
+ "latest-published" if not node.can_edit(auth) else default_version
+ )
# Quit if no dataset linked
if not node_addon.complete:
@@ -212,21 +223,25 @@ def _dataverse_root_folder(node_addon, auth, **kwargs):
can_edit = node.can_edit(auth)
permissions = {
- 'edit': can_edit and not node.is_registration,
- 'view': node.can_view(auth)
+ "edit": can_edit and not node.is_registration,
+ "view": node.can_view(auth),
}
try:
connection = client.connect_from_settings(node_addon)
- dataverse = client.get_dataverse(connection, node_addon.dataverse_alias)
+ dataverse = client.get_dataverse(
+ connection, node_addon.dataverse_alias
+ )
dataset = client.get_dataset(dataverse, node_addon.dataset_doi)
except SSLError:
- return [rubeus.build_addon_root(
- node_addon,
- node_addon.dataset,
- permissions=permissions,
- private_key=kwargs.get('view_only', None),
- )]
+ return [
+ rubeus.build_addon_root(
+ node_addon,
+ node_addon.dataset,
+ permissions=permissions,
+ private_key=kwargs.get("view_only", None),
+ )
+ ]
# Quit if doi does not produce a dataset
if dataset is None:
@@ -237,20 +252,20 @@ def _dataverse_root_folder(node_addon, auth, **kwargs):
# Produce draft version or quit if no published version is available
if not published_files:
if can_edit:
- version = 'latest'
+ version = "latest"
else:
return []
urls = {
- 'publish': node.api_url_for('dataverse_publish_dataset'),
+ "publish": node.api_url_for("dataverse_publish_dataset"),
}
# determine if there are any changes between the published and draft
# versions of the dataset
try:
- dataset.get_metadata('latest-published')
+ dataset.get_metadata("latest-published")
dataset_is_published = True
- dataset_draft_modified = dataset.get_state() == 'DRAFT'
+ dataset_draft_modified = dataset.get_state() == "DRAFT"
except VersionJsonNotFoundError:
dataset_is_published = False
dataset_draft_modified = True
@@ -262,43 +277,44 @@ def _dataverse_root_folder(node_addon, auth, **kwargs):
try:
host_custom_publish_text = client.get_custom_publish_text(connection)
except OperationFailedError:
- host_custom_publish_text = ''
-
- return [rubeus.build_addon_root(
- node_addon,
- node_addon.dataset,
- urls=urls,
- permissions=permissions,
- dataset=node_addon.dataset,
- doi=dataset.doi,
- dataverse=dataverse.title,
- hasPublishedFiles=bool(published_files),
- dataverseIsPublished=dataverse.is_published,
- datasetIsPublished=dataset_is_published,
- datasetDraftModified=dataset_draft_modified,
- version=version,
- host=dataverse_host,
- hostCustomPublishText=host_custom_publish_text,
- private_key=kwargs.get('view_only', None),
- )]
+ host_custom_publish_text = ""
+
+ return [
+ rubeus.build_addon_root(
+ node_addon,
+ node_addon.dataset,
+ urls=urls,
+ permissions=permissions,
+ dataset=node_addon.dataset,
+ doi=dataset.doi,
+ dataverse=dataverse.title,
+ hasPublishedFiles=bool(published_files),
+ dataverseIsPublished=dataverse.is_published,
+ datasetIsPublished=dataset_is_published,
+ datasetDraftModified=dataset_draft_modified,
+ version=version,
+ host=dataverse_host,
+ hostCustomPublishText=host_custom_publish_text,
+ private_key=kwargs.get("view_only", None),
+ )
+ ]
@must_be_contributor_or_public
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "node")
def dataverse_root_folder(node_addon, auth, **kwargs):
return _dataverse_root_folder(node_addon, auth=auth)
@must_be_contributor_or_public
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "node")
def dataverse_get_widget_contents(node_addon, **kwargs):
-
data = {
- 'connected': False,
+ "connected": False,
}
if not node_addon.complete:
- return {'data': data}, http_status.HTTP_200_OK
+ return {"data": data}, http_status.HTTP_200_OK
doi = node_addon.dataset_doi
alias = node_addon.dataverse_alias
@@ -308,19 +324,21 @@ def dataverse_get_widget_contents(node_addon, **kwargs):
dataset = client.get_dataset(dataverse, doi)
if dataset is None:
- return {'data': data}, http_status.HTTP_400_BAD_REQUEST
+ return {"data": data}, http_status.HTTP_400_BAD_REQUEST
dataverse_host = node_addon.external_account.oauth_key
- dataverse_url = f'http://{dataverse_host}/dataverse/{alias}'
- dataset_url = 'https://doi.org/' + doi
-
- data.update({
- 'connected': True,
- 'dataverse': node_addon.dataverse,
- 'dataverseUrl': dataverse_url,
- 'dataset': node_addon.dataset,
- 'doi': doi,
- 'datasetUrl': dataset_url,
- 'citation': dataset.citation,
- })
- return {'data': data}, http_status.HTTP_200_OK
+ dataverse_url = f"http://{dataverse_host}/dataverse/{alias}"
+ dataset_url = "https://doi.org/" + doi
+
+ data.update(
+ {
+ "connected": True,
+ "dataverse": node_addon.dataverse,
+ "dataverseUrl": dataverse_url,
+ "dataset": node_addon.dataset,
+ "doi": doi,
+ "datasetUrl": dataset_url,
+ "citation": dataset.citation,
+ }
+ )
+ return {"data": data}, http_status.HTTP_200_OK
diff --git a/addons/dropbox/apps.py b/addons/dropbox/apps.py
index 07ec04355d2..c5e1d6fe6d3 100644
--- a/addons/dropbox/apps.py
+++ b/addons/dropbox/apps.py
@@ -2,21 +2,20 @@
from addons.dropbox.settings import MAX_UPLOAD_SIZE
-dropbox_root_folder = generic_root_folder('dropbox')
+dropbox_root_folder = generic_root_folder("dropbox")
class DropboxAddonAppConfig(BaseAddonAppConfig):
-
default = True
- name = 'addons.dropbox'
- label = 'addons_dropbox'
- full_name = 'Dropbox'
- short_name = 'dropbox'
- configs = ['accounts', 'node']
+ name = "addons.dropbox"
+ label = "addons_dropbox"
+ full_name = "Dropbox"
+ short_name = "dropbox"
+ configs = ["accounts", "node"]
has_hgrid_files = True
max_file_size = MAX_UPLOAD_SIZE
- owners = ['user', 'node']
- categories = ['storage']
+ owners = ["user", "node"]
+ categories = ["storage"]
@property
def get_hgrid_data(self):
@@ -25,18 +24,23 @@ def get_hgrid_data(self):
@property
def routes(self):
from . import routes
+
return [routes.auth_routes, routes.api_routes]
- FOLDER_SELECTED = 'dropbox_folder_selected'
- NODE_AUTHORIZED = 'dropbox_node_authorized'
- NODE_DEAUTHORIZED = 'dropbox_node_deauthorized'
+ FOLDER_SELECTED = "dropbox_folder_selected"
+ NODE_AUTHORIZED = "dropbox_node_authorized"
+ NODE_DEAUTHORIZED = "dropbox_node_deauthorized"
- actions = (FOLDER_SELECTED, NODE_AUTHORIZED, NODE_DEAUTHORIZED, )
+ actions = (
+ FOLDER_SELECTED,
+ NODE_AUTHORIZED,
+ NODE_DEAUTHORIZED,
+ )
@property
def user_settings(self):
- return self.get_model('UserSettings')
+ return self.get_model("UserSettings")
@property
def node_settings(self):
- return self.get_model('NodeSettings')
+ return self.get_model("NodeSettings")
diff --git a/addons/dropbox/models.py b/addons/dropbox/models.py
index d28d761dcd9..abf3b00ff79 100644
--- a/addons/dropbox/models.py
+++ b/addons/dropbox/models.py
@@ -2,8 +2,11 @@
import logging
import os
-from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
- BaseStorageAddon)
+from addons.base.models import (
+ BaseOAuthNodeSettings,
+ BaseOAuthUserSettings,
+ BaseStorageAddon,
+)
from django.db import models
from dropbox import Dropbox
from dropbox.exceptions import ApiError, DropboxException
@@ -23,7 +26,7 @@
class DropboxFileNode(BaseFileNode):
- _provider = 'dropbox'
+ _provider = "dropbox"
class DropboxFolder(DropboxFileNode, Folder):
@@ -34,14 +37,18 @@ class DropboxFile(DropboxFileNode, File):
@property
def _hashes(self):
try:
- return {'Dropbox content_hash': self._history[-1]['extra']['hashes']['dropbox']}
+ return {
+ "Dropbox content_hash": self._history[-1]["extra"]["hashes"][
+ "dropbox"
+ ]
+ }
except (IndexError, KeyError):
return None
class Provider(ExternalProvider):
- name = 'Dropbox'
- short_name = 'dropbox'
+ name = "Dropbox"
+ short_name = "dropbox"
client_id = settings.DROPBOX_KEY
client_secret = settings.DROPBOX_SECRET
@@ -56,16 +63,16 @@ def auth_url(self):
# Dropbox requires explicitly requesting refresh_tokens via `token_access_type`
# https://developers.dropbox.com/oauth-guide#implementing-oauth
url = super().auth_url
- return furl(url).add({'token_access_type': 'offline'}).url
+ return furl(url).add({"token_access_type": "offline"}).url
def handle_callback(self, response):
- access_token = response['access_token']
+ access_token = response["access_token"]
self.client = Dropbox(access_token)
info = self.client.users_get_current_account()
return {
- 'key': access_token,
- 'provider_id': info.account_id,
- 'display_name': info.name.display_name,
+ "key": access_token,
+ "provider_id": info.account_id,
+ "display_name": info.name.display_name,
}
def fetch_access_token(self, force_refresh=False):
@@ -77,6 +84,7 @@ class UserSettings(BaseOAuthUserSettings):
"""Stores user-specific dropbox information.
token.
"""
+
oauth_provider = Provider
serializer = DropboxSerializer
@@ -97,7 +105,9 @@ class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
serializer = DropboxSerializer
folder = models.TextField(null=True, blank=True)
- user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
+ user_settings = models.ForeignKey(
+ UserSettings, null=True, blank=True, on_delete=models.CASCADE
+ )
_api = None
@@ -114,7 +124,11 @@ def folder_id(self):
@property
def folder_name(self):
- return os.path.split(self.folder or '')[1] or '/ (Full Dropbox)' if self.folder else None
+ return (
+ os.path.split(self.folder or "")[1] or "/ (Full Dropbox)"
+ if self.folder
+ else None
+ )
@property
def folder_path(self):
@@ -122,7 +136,7 @@ def folder_path(self):
@property
def display_name(self):
- return f'{self.config.full_name}: {self.folder}'
+ return f"{self.config.full_name}: {self.folder}"
def fetch_access_token(self):
return self.api.fetch_access_token()
@@ -131,48 +145,59 @@ def clear_settings(self):
self.folder = None
def get_folders(self, **kwargs):
- folder_id = kwargs.get('folder_id')
+ folder_id = kwargs.get("folder_id")
if folder_id is None:
- return [{
- 'addon': 'dropbox',
- 'id': '/',
- 'path': '/',
- 'kind': 'folder',
- 'name': '/ (Full Dropbox)',
- 'urls': {
- 'folders': api_v2_url(f'nodes/{self.owner._id}/addons/dropbox/folders/', params={'id': '/'})
+ return [
+ {
+ "addon": "dropbox",
+ "id": "/",
+ "path": "/",
+ "kind": "folder",
+ "name": "/ (Full Dropbox)",
+ "urls": {
+ "folders": api_v2_url(
+ f"nodes/{self.owner._id}/addons/dropbox/folders/",
+ params={"id": "/"},
+ )
+ },
}
- }]
+ ]
client = Dropbox(self.fetch_access_token())
try:
- folder_id = '' if folder_id == '/' else folder_id
+ folder_id = "" if folder_id == "/" else folder_id
list_folder = client.files_list_folder(folder_id)
contents = [x for x in list_folder.entries]
while list_folder.has_more:
- list_folder = client.files_list_folder_continue(list_folder.cursor)
+ list_folder = client.files_list_folder_continue(
+ list_folder.cursor
+ )
contents += [x for x in list_folder.entries]
except ApiError as error:
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={
- 'message_short': error.user_message_text,
- 'message_long': error.user_message_text,
- })
+ raise HTTPError(
+ http_status.HTTP_400_BAD_REQUEST,
+ data={
+ "message_short": error.user_message_text,
+ "message_long": error.user_message_text,
+ },
+ )
except DropboxException:
raise HTTPError(http_status.HTTP_400_BAD_REQUEST)
return [
{
- 'addon': 'dropbox',
- 'kind': 'folder',
- 'id': item.path_display,
- 'name': item.path_display.split('/')[-1],
- 'path': item.path_display,
- 'urls': {
- 'folders': api_v2_url(
- f'nodes/{self.owner._id}/addons/dropbox/folders/', params={'id': item.path_display}
+ "addon": "dropbox",
+ "kind": "folder",
+ "id": item.path_display,
+ "name": item.path_display.split("/")[-1],
+ "path": item.path_display,
+ "urls": {
+ "folders": api_v2_url(
+ f"nodes/{self.owner._id}/addons/dropbox/folders/",
+ params={"id": item.path_display},
)
- }
+ },
}
for item in contents
if isinstance(item, FolderMetadata)
@@ -181,7 +206,7 @@ def get_folders(self, **kwargs):
def set_folder(self, folder, auth):
self.folder = folder
# Add log to node
- self.nodelogger.log(action='folder_selected', save=True)
+ self.nodelogger.log(action="folder_selected", save=True)
def deauthorize(self, auth=None, add_log=True):
"""Remove user authorization from this node and log the event."""
@@ -189,44 +214,43 @@ def deauthorize(self, auth=None, add_log=True):
self.clear_settings()
if add_log:
- extra = {'folder': folder}
- self.nodelogger.log(action='node_deauthorized', extra=extra, save=True)
+ extra = {"folder": folder}
+ self.nodelogger.log(
+ action="node_deauthorized", extra=extra, save=True
+ )
self.clear_auth()
def serialize_waterbutler_credentials(self):
if not self.has_auth:
- raise exceptions.AddonError('Addon is not authorized')
- return {'token': self.fetch_access_token()}
+ raise exceptions.AddonError("Addon is not authorized")
+ return {"token": self.fetch_access_token()}
def serialize_waterbutler_settings(self):
if not self.folder:
- raise exceptions.AddonError('Folder is not configured')
- return {'folder': self.folder}
+ raise exceptions.AddonError("Folder is not configured")
+ return {"folder": self.folder}
def create_waterbutler_log(self, auth, action, metadata):
url = self.owner.web_url_for(
- 'addon_view_or_download_file',
- path=metadata['path'].strip('/'),
- provider='dropbox'
+ "addon_view_or_download_file",
+ path=metadata["path"].strip("/"),
+ provider="dropbox",
)
self.owner.add_log(
- f'dropbox_{action}',
+ f"dropbox_{action}",
auth=auth,
params={
- 'project': self.owner.parent_id,
- 'node': self.owner._id,
- 'path': metadata['path'],
- 'folder': self.folder,
- 'urls': {
- 'view': url,
- 'download': url + '?action=download'
- },
+ "project": self.owner.parent_id,
+ "node": self.owner._id,
+ "path": metadata["path"],
+ "folder": self.folder,
+ "urls": {"view": url, "download": url + "?action=download"},
},
)
def __repr__(self):
- return f''
+ return f""
##### Callback overrides #####
def after_delete(self, user):
diff --git a/addons/dropbox/routes.py b/addons/dropbox/routes.py
index 91a51d784ae..a1ef17f73b3 100644
--- a/addons/dropbox/routes.py
+++ b/addons/dropbox/routes.py
@@ -1,66 +1,70 @@
"""Dropbox addon routes."""
+
from framework.routing import Rule, json_renderer
from addons.dropbox import views
auth_routes = {
- 'rules': [
-
+ "rules": [
Rule(
- '/settings/dropbox/accounts/',
- 'get',
+ "/settings/dropbox/accounts/",
+ "get",
views.dropbox_account_list,
json_renderer,
)
],
- 'prefix': '/api/v1'
+ "prefix": "/api/v1",
}
api_routes = {
- 'rules': [
-
+ "rules": [
##### Node settings #####
-
Rule(
- ['/project//dropbox/config/',
- '/project//node//dropbox/config/'],
- 'get',
+ [
+ "/project//dropbox/config/",
+ "/project//node//dropbox/config/",
+ ],
+ "get",
views.dropbox_get_config,
- json_renderer
+ json_renderer,
),
-
Rule(
- ['/project//dropbox/config/',
- '/project//node//dropbox/config/'],
- 'put',
+ [
+ "/project//dropbox/config/",
+ "/project//node//dropbox/config/",
+ ],
+ "put",
views.dropbox_set_config,
- json_renderer
+ json_renderer,
),
Rule(
- ['/project//dropbox/config/',
- '/project//node//dropbox/config/'],
- 'delete',
+ [
+ "/project//dropbox/config/",
+ "/project//node//dropbox/config/",
+ ],
+ "delete",
views.dropbox_deauthorize_node,
- json_renderer
+ json_renderer,
),
-
Rule(
- ['/project//dropbox/config/import-auth/',
- '/project//node//dropbox/config/import-auth/'],
- 'put',
+ [
+ "/project//dropbox/config/import-auth/",
+ "/project//node//dropbox/config/import-auth/",
+ ],
+ "put",
views.dropbox_import_auth,
- json_renderer
+ json_renderer,
),
-
Rule(
- ['/project//dropbox/folders/',
- '/project//node//dropbox/folders/'],
- 'get',
+ [
+ "/project//dropbox/folders/",
+ "/project//node//dropbox/folders/",
+ ],
+ "get",
views.dropbox_folder_list,
- json_renderer
+ json_renderer,
),
-
],
- 'prefix': '/api/v1'
+ "prefix": "/api/v1",
}
diff --git a/addons/dropbox/serializer.py b/addons/dropbox/serializer.py
index 0fa89e808a1..4e13825cbe5 100644
--- a/addons/dropbox/serializer.py
+++ b/addons/dropbox/serializer.py
@@ -6,12 +6,13 @@
class DropboxSerializer(StorageAddonSerializer):
-
- addon_short_name = 'dropbox'
+ addon_short_name = "dropbox"
def credentials_are_valid(self, user_settings, client):
if user_settings:
- client = client or Dropbox(user_settings.external_accounts[0].oauth_key)
+ client = client or Dropbox(
+ user_settings.external_accounts[0].oauth_key
+ )
try:
client.users_get_current_account()
except (AssertionError, DropboxException):
@@ -21,8 +22,8 @@ def credentials_are_valid(self, user_settings, client):
def serialized_folder(self, node_settings):
path = node_settings.folder
return {
- 'name': path if path != '/' else '/ (Full Dropbox)',
- 'path': path
+ "name": path if path != "/" else "/ (Full Dropbox)",
+ "path": path,
}
@property
@@ -30,12 +31,11 @@ def addon_serialized_urls(self):
node = self.node_settings.owner
return {
- 'auth': api_url_for('oauth_connect',
- service_name='dropbox'),
- 'importAuth': node.api_url_for('dropbox_import_auth'),
- 'files': node.web_url_for('collect_file_trees'),
- 'folders': node.api_url_for('dropbox_folder_list'),
- 'config': node.api_url_for('dropbox_set_config'),
- 'deauthorize': node.api_url_for('dropbox_deauthorize_node'),
- 'accounts': node.api_url_for('dropbox_account_list'),
+ "auth": api_url_for("oauth_connect", service_name="dropbox"),
+ "importAuth": node.api_url_for("dropbox_import_auth"),
+ "files": node.web_url_for("collect_file_trees"),
+ "folders": node.api_url_for("dropbox_folder_list"),
+ "config": node.api_url_for("dropbox_set_config"),
+ "deauthorize": node.api_url_for("dropbox_deauthorize_node"),
+ "accounts": node.api_url_for("dropbox_account_list"),
}
diff --git a/addons/dropbox/settings/__init__.py b/addons/dropbox/settings/__init__.py
index eb5d40c3725..8ef6b3f33e5 100644
--- a/addons/dropbox/settings/__init__.py
+++ b/addons/dropbox/settings/__init__.py
@@ -6,4 +6,4 @@
try:
from .local import * # noqa
except ImportError:
- logger.warning('No local.py settings file found')
+ logger.warning("No local.py settings file found")
diff --git a/addons/dropbox/settings/defaults.py b/addons/dropbox/settings/defaults.py
index e44e7a41eb0..f4a1f5d7459 100644
--- a/addons/dropbox/settings/defaults.py
+++ b/addons/dropbox/settings/defaults.py
@@ -2,9 +2,9 @@
DROPBOX_KEY = None
DROPBOX_SECRET = None
-DROPBOX_AUTH_CSRF_TOKEN = 'dropbox-auth-csrf-token'
-DROPBOX_OAUTH_AUTH_ENDPOINT = 'https://www.dropbox.com/oauth2/authorize'
-DROPBOX_OAUTH_TOKEN_ENDPOINT = 'https://www.dropbox.com/oauth2/token'
+DROPBOX_AUTH_CSRF_TOKEN = "dropbox-auth-csrf-token"
+DROPBOX_OAUTH_AUTH_ENDPOINT = "https://www.dropbox.com/oauth2/authorize"
+DROPBOX_OAUTH_TOKEN_ENDPOINT = "https://www.dropbox.com/oauth2/token"
REFRESH_TIME = 14399 # 4 hours
# Max file size permitted by frontend in megabytes
diff --git a/addons/dropbox/settings/local-dist.py b/addons/dropbox/settings/local-dist.py
index 1e0bfb434fb..3ced2d9b8e1 100644
--- a/addons/dropbox/settings/local-dist.py
+++ b/addons/dropbox/settings/local-dist.py
@@ -1,6 +1,7 @@
"""Example Dropbox local settings file. Copy this file to local.py and change
these settings.
"""
+
# Get an app key and secret at https://www.dropbox.com/developers/apps
-DROPBOX_KEY = 'changeme'
-DROPBOX_SECRET = 'changeme'
+DROPBOX_KEY = "changeme"
+DROPBOX_SECRET = "changeme"
diff --git a/addons/dropbox/tests/factories.py b/addons/dropbox/tests/factories.py
index 4962ccc586c..d1be51bcac8 100644
--- a/addons/dropbox/tests/factories.py
+++ b/addons/dropbox/tests/factories.py
@@ -1,6 +1,10 @@
import factory
from factory.django import DjangoModelFactory
-from osf_tests.factories import UserFactory, ProjectFactory, ExternalAccountFactory
+from osf_tests.factories import (
+ UserFactory,
+ ProjectFactory,
+ ExternalAccountFactory,
+)
from addons.dropbox.models import NodeSettings
from addons.dropbox.models import UserSettings
@@ -18,12 +22,13 @@ class DropboxNodeSettingsFactory(DjangoModelFactory):
class Meta:
model = NodeSettings
- folder = 'Camera Uploads'
+ folder = "Camera Uploads"
owner = factory.SubFactory(ProjectFactory)
user_settings = factory.SubFactory(DropboxUserSettingsFactory)
external_account = factory.SubFactory(ExternalAccountFactory)
+
class DropboxAccountFactory(ExternalAccountFactory):
- provider = 'dropbox'
- provider_id = factory.Sequence(lambda n: f'id-{n}')
- oauth_key = factory.Sequence(lambda n: f'key-{n}')
+ provider = "dropbox"
+ provider_id = factory.Sequence(lambda n: f"id-{n}")
+ oauth_key = factory.Sequence(lambda n: f"key-{n}")
diff --git a/addons/dropbox/tests/test_client.py b/addons/dropbox/tests/test_client.py
index 5d193ab6d62..4255223c70d 100644
--- a/addons/dropbox/tests/test_client.py
+++ b/addons/dropbox/tests/test_client.py
@@ -8,19 +8,17 @@
class TestCore(unittest.TestCase):
-
def setUp(self):
-
super().setUp()
self.user = UserFactory()
- self.user.add_addon('dropbox')
+ self.user.add_addon("dropbox")
self.user.save()
- self.settings = self.user.get_addon('dropbox')
- self.settings.access_token = '12345'
+ self.settings = self.user.get_addon("dropbox")
+ self.settings.access_token = "12345"
self.settings.save()
def test_get_addon_returns_dropbox_user_settings(self):
- result = self.user.get_addon('dropbox')
+ result = self.user.get_addon("dropbox")
assert isinstance(result, UserSettings)
diff --git a/addons/dropbox/tests/test_models.py b/addons/dropbox/tests/test_models.py
index 669fc2cbab2..37cb273eb0d 100644
--- a/addons/dropbox/tests/test_models.py
+++ b/addons/dropbox/tests/test_models.py
@@ -2,17 +2,21 @@
from unittest import mock
import pytest
-from addons.base.tests.models import (OAuthAddonNodeSettingsTestSuiteMixin,
- OAuthAddonUserSettingTestSuiteMixin)
+from addons.base.tests.models import (
+ OAuthAddonNodeSettingsTestSuiteMixin,
+ OAuthAddonUserSettingTestSuiteMixin,
+)
from addons.dropbox.models import NodeSettings
from addons.dropbox.tests import factories
pytestmark = pytest.mark.django_db
-class TestDropboxNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase):
- full_name = 'dropbox'
- short_name = 'dropbox'
+class TestDropboxNodeSettings(
+ OAuthAddonNodeSettingsTestSuiteMixin, unittest.TestCase
+):
+ full_name = "dropbox"
+ short_name = "dropbox"
ExternalAccountFactory = factories.DropboxAccountFactory
NodeSettingsClass = NodeSettings
@@ -21,29 +25,30 @@ class TestDropboxNodeSettings(OAuthAddonNodeSettingsTestSuiteMixin, unittest.Tes
def _node_settings_class_kwargs(self, node, user_settings):
return {
- 'user_settings': self.user_settings,
- 'folder': '1234567890',
- 'owner': self.node
+ "user_settings": self.user_settings,
+ "folder": "1234567890",
+ "owner": self.node,
}
def test_folder_defaults_to_none(self):
node_settings = NodeSettings(
- owner=factories.ProjectFactory(),
- user_settings=self.user_settings
+ owner=factories.ProjectFactory(), user_settings=self.user_settings
)
node_settings.save()
assert node_settings.folder is None
@mock.patch(
- 'addons.dropbox.models.UserSettings.revoke_remote_oauth_access',
- mock.PropertyMock()
+ "addons.dropbox.models.UserSettings.revoke_remote_oauth_access",
+ mock.PropertyMock(),
)
def test_complete_has_auth_not_verified(self):
super().test_complete_has_auth_not_verified()
-class TestDropboxUserSettings(OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase):
- full_name = 'dropbox'
- short_name = 'dropbox'
+class TestDropboxUserSettings(
+ OAuthAddonUserSettingTestSuiteMixin, unittest.TestCase
+):
+ full_name = "dropbox"
+ short_name = "dropbox"
ExternalAccountFactory = factories.DropboxAccountFactory
diff --git a/addons/dropbox/tests/test_serializers.py b/addons/dropbox/tests/test_serializers.py
index c8c42bd6893..e27d3f8fa4f 100644
--- a/addons/dropbox/tests/test_serializers.py
+++ b/addons/dropbox/tests/test_serializers.py
@@ -1,4 +1,5 @@
"""Serializer tests for the Dropbox addon."""
+
import pytest
from tests.base import OsfTestCase
@@ -15,7 +16,7 @@
class TestDropboxSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
- addon_short_name = 'dropbox'
+ addon_short_name = "dropbox"
ExternalAccountFactory = DropboxAccountFactory
Serializer = DropboxSerializer
diff --git a/addons/dropbox/tests/test_views.py b/addons/dropbox/tests/test_views.py
index d9c2cede973..1c8b3c0d8ab 100644
--- a/addons/dropbox/tests/test_views.py
+++ b/addons/dropbox/tests/test_views.py
@@ -1,4 +1,5 @@
"""Views tests for the Dropbox addon."""
+
from rest_framework import status as http_status
import unittest
@@ -25,144 +26,179 @@
pytestmark = pytest.mark.django_db
-class TestAuthViews(DropboxAddonTestCase, views_testing.OAuthAddonAuthViewsTestCaseMixin, OsfTestCase):
-
+class TestAuthViews(
+ DropboxAddonTestCase,
+ views_testing.OAuthAddonAuthViewsTestCaseMixin,
+ OsfTestCase,
+):
@mock.patch(
- 'addons.dropbox.models.Provider.auth_url',
- mock.PropertyMock(return_value='http://api.foo.com')
+ "addons.dropbox.models.Provider.auth_url",
+ mock.PropertyMock(return_value="http://api.foo.com"),
)
def test_oauth_start(self):
super().test_oauth_start()
- @mock.patch('addons.dropbox.models.UserSettings.revoke_remote_oauth_access', mock.PropertyMock())
+ @mock.patch(
+ "addons.dropbox.models.UserSettings.revoke_remote_oauth_access",
+ mock.PropertyMock(),
+ )
def test_delete_external_account(self):
super().test_delete_external_account()
-class TestConfigViews(DropboxAddonTestCase, views_testing.OAuthAddonConfigViewsTestCaseMixin, OsfTestCase):
-
- folder = {
- 'path': '12234',
- 'id': '12234'
- }
+class TestConfigViews(
+ DropboxAddonTestCase,
+ views_testing.OAuthAddonConfigViewsTestCaseMixin,
+ OsfTestCase,
+):
+ folder = {"path": "12234", "id": "12234"}
Serializer = DropboxSerializer
client = mock_client
- @mock.patch('addons.dropbox.models.Dropbox', return_value=mock_client)
+ @mock.patch("addons.dropbox.models.Dropbox", return_value=mock_client)
def test_folder_list(self, *args):
super().test_folder_list()
- @mock.patch.object(DropboxSerializer, 'credentials_are_valid', return_value=True)
+ @mock.patch.object(
+ DropboxSerializer, "credentials_are_valid", return_value=True
+ )
def test_import_auth(self, *args):
super().test_import_auth()
class TestFilebrowserViews(DropboxAddonTestCase, OsfTestCase):
-
def setUp(self):
super().setUp()
- self.user.add_addon('dropbox')
- self.node_settings.external_account = self.user_settings.external_accounts[0]
+ self.user.add_addon("dropbox")
+ self.node_settings.external_account = (
+ self.user_settings.external_accounts[0]
+ )
self.node_settings.save()
- @mock.patch('addons.dropbox.models.FolderMetadata', new=MockFolderMetadata)
+ @mock.patch("addons.dropbox.models.FolderMetadata", new=MockFolderMetadata)
def test_dropbox_folder_list(self):
- with patch_client('addons.dropbox.models.Dropbox'):
+ with patch_client("addons.dropbox.models.Dropbox"):
url = self.project.api_url_for(
- 'dropbox_folder_list',
- folder_id='/',
+ "dropbox_folder_list",
+ folder_id="/",
)
res = self.app.get(url, auth=self.user.auth)
- contents = [x for x in mock_client.files_list_folder('').entries if isinstance(x, MockFolderMetadata)]
+ contents = [
+ x
+ for x in mock_client.files_list_folder("").entries
+ if isinstance(x, MockFolderMetadata)
+ ]
first = res.json[0]
assert len(res.json) == len(contents)
- assert 'kind' in first
- assert first['path'] == contents[0].path_display
-
- @mock.patch('addons.dropbox.models.FolderMetadata', new=MockFolderMetadata)
- @mock.patch('addons.dropbox.models.Dropbox.files_list_folder_continue')
- @mock.patch('addons.dropbox.models.Dropbox.files_list_folder')
- def test_dropbox_folder_list_has_more(self, mock_list_folder, mock_list_folder_continue):
+ assert "kind" in first
+ assert first["path"] == contents[0].path_display
+
+ @mock.patch("addons.dropbox.models.FolderMetadata", new=MockFolderMetadata)
+ @mock.patch("addons.dropbox.models.Dropbox.files_list_folder_continue")
+ @mock.patch("addons.dropbox.models.Dropbox.files_list_folder")
+ def test_dropbox_folder_list_has_more(
+ self, mock_list_folder, mock_list_folder_continue
+ ):
mock_list_folder.return_value = MockListFolderResult(has_more=True)
mock_list_folder_continue.return_value = MockListFolderResult()
url = self.project.api_url_for(
- 'dropbox_folder_list',
- folder_id='/',
+ "dropbox_folder_list",
+ folder_id="/",
)
res = self.app.get(url, auth=self.user.auth)
contents = [
- each for each in
- (mock_client.files_list_folder('').entries + mock_client.files_list_folder_continue('').entries)
+ each
+ for each in (
+ mock_client.files_list_folder("").entries
+ + mock_client.files_list_folder_continue("").entries
+ )
if isinstance(each, MockFolderMetadata)
]
- mock_list_folder.assert_called_once_with('')
- mock_list_folder_continue.assert_called_once_with('ZtkX9_EHj3x7PMkVuFIhwKYXEpwpLwyxp9vMKomUhllil9q7eWiAu')
+ mock_list_folder.assert_called_once_with("")
+ mock_list_folder_continue.assert_called_once_with(
+ "ZtkX9_EHj3x7PMkVuFIhwKYXEpwpLwyxp9vMKomUhllil9q7eWiAu"
+ )
assert len(res.json) == 2
assert len(res.json) == len(contents)
def test_dropbox_folder_list_if_folder_is_none_and_folders_only(self):
- with patch_client('addons.dropbox.models.Dropbox'):
+ with patch_client("addons.dropbox.models.Dropbox"):
self.node_settings.folder = None
self.node_settings.save()
- url = self.project.api_url_for('dropbox_folder_list')
+ url = self.project.api_url_for("dropbox_folder_list")
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.files_list_folder('').entries
- expected = [each for each in contents if isinstance(each, MockFolderMetadata)]
+ contents = mock_client.files_list_folder("").entries
+ expected = [
+ each
+ for each in contents
+ if isinstance(each, MockFolderMetadata)
+ ]
assert len(res.json) == len(expected)
def test_dropbox_folder_list_folders_only(self):
- with patch_client('addons.dropbox.models.Dropbox'):
- url = self.project.api_url_for('dropbox_folder_list')
+ with patch_client("addons.dropbox.models.Dropbox"):
+ url = self.project.api_url_for("dropbox_folder_list")
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.files_list_folder('').entries
- expected = [each for each in contents if isinstance(each, MockFolderMetadata)]
+ contents = mock_client.files_list_folder("").entries
+ expected = [
+ each
+ for each in contents
+ if isinstance(each, MockFolderMetadata)
+ ]
assert len(res.json) == len(expected)
- @mock.patch('addons.dropbox.models.Dropbox.files_list_folder')
+ @mock.patch("addons.dropbox.models.Dropbox.files_list_folder")
def test_dropbox_folder_list_include_root(self, mock_metadata):
- with patch_client('addons.dropbox.models.Dropbox'):
- url = self.project.api_url_for('dropbox_folder_list')
+ with patch_client("addons.dropbox.models.Dropbox"):
+ url = self.project.api_url_for("dropbox_folder_list")
res = self.app.get(url, auth=self.user.auth)
- contents = mock_client.files_list_folder('').entries
+ contents = mock_client.files_list_folder("").entries
assert len(res.json) == 1
assert len(res.json) != len(contents)
- assert res.json[0]['path'] == '/'
+ assert res.json[0]["path"] == "/"
- @unittest.skip('finish this')
+ @unittest.skip("finish this")
def test_dropbox_root_folder(self):
- assert 0, 'finish me'
+ assert 0, "finish me"
def test_dropbox_root_folder_if_folder_is_none(self):
# Something is returned on normal circumstances
- with mock.patch.object(type(self.node_settings), 'has_auth', True):
- root = dropbox_root_folder(node_settings=self.node_settings, auth=self.user.auth)
+ with mock.patch.object(type(self.node_settings), "has_auth", True):
+ root = dropbox_root_folder(
+ node_settings=self.node_settings, auth=self.user.auth
+ )
assert root is not None
# Nothing is returned when there is no folder linked
self.node_settings.folder = None
self.node_settings.save()
- with mock.patch.object(type(self.node_settings), 'has_auth', True):
- root = dropbox_root_folder(node_settings=self.node_settings, auth=self.user.auth)
+ with mock.patch.object(type(self.node_settings), "has_auth", True):
+ root = dropbox_root_folder(
+ node_settings=self.node_settings, auth=self.user.auth
+ )
assert root is None
- @mock.patch('addons.dropbox.models.Dropbox.files_list_folder')
- def test_dropbox_folder_list_returns_error_if_invalid_path(self, mock_metadata):
+ @mock.patch("addons.dropbox.models.Dropbox.files_list_folder")
+ def test_dropbox_folder_list_returns_error_if_invalid_path(
+ self, mock_metadata
+ ):
mock_error = mock.Mock()
- mock_metadata.side_effect = ApiError('', mock_error, '', '')
- url = self.project.api_url_for('dropbox_folder_list', folder_id='/fake_path')
- with mock.patch.object(type(self.node_settings), 'has_auth', True):
+ mock_metadata.side_effect = ApiError("", mock_error, "", "")
+ url = self.project.api_url_for(
+ "dropbox_folder_list", folder_id="/fake_path"
+ )
+ with mock.patch.object(type(self.node_settings), "has_auth", True):
res = self.app.get(url, auth=self.user.auth)
assert res.status_code == http_status.HTTP_400_BAD_REQUEST
class TestRestrictions(DropboxAddonTestCase, OsfTestCase):
-
def setUp(self):
super(DropboxAddonTestCase, self).setUp()
@@ -173,31 +209,32 @@ def setUp(self):
self.project.save()
# Set shared folder
- self.node_settings.folder = 'foo bar/bar'
+ self.node_settings.folder = "foo bar/bar"
self.node_settings.save()
- @mock.patch('addons.dropbox.models.Dropbox.files_list_folder')
+ @mock.patch("addons.dropbox.models.Dropbox.files_list_folder")
def test_restricted_folder_list(self, mock_metadata):
mock_metadata.return_value = MockListFolderResult()
# tries to access a parent folder
- url = self.project.api_url_for('dropbox_folder_list',
- path='foo bar')
+ url = self.project.api_url_for("dropbox_folder_list", path="foo bar")
res = self.app.get(url, auth=self.contrib.auth)
assert res.status_code == http_status.HTTP_403_FORBIDDEN
def test_restricted_config_contrib_no_addon(self):
- url = self.project.api_url_for('dropbox_set_config')
- res = self.app.put(url, json={'selected': {'path': 'foo'}},
- auth=self.contrib.auth)
+ url = self.project.api_url_for("dropbox_set_config")
+ res = self.app.put(
+ url, json={"selected": {"path": "foo"}}, auth=self.contrib.auth
+ )
assert res.status_code == http_status.HTTP_400_BAD_REQUEST
def test_restricted_config_contrib_not_owner(self):
# Contributor has dropbox auth, but is not the node authorizer
- self.contrib.add_addon('dropbox')
+ self.contrib.add_addon("dropbox")
self.contrib.save()
- url = self.project.api_url_for('dropbox_set_config')
- res = self.app.put(url, json={'selected': {'path': 'foo'}},
- auth=self.contrib.auth)
+ url = self.project.api_url_for("dropbox_set_config")
+ res = self.app.put(
+ url, json={"selected": {"path": "foo"}}, auth=self.contrib.auth
+ )
assert res.status_code == http_status.HTTP_403_FORBIDDEN
diff --git a/addons/dropbox/tests/utils.py b/addons/dropbox/tests/utils.py
index 6ef97b047d8..8bcf3f1e348 100644
--- a/addons/dropbox/tests/utils.py
+++ b/addons/dropbox/tests/utils.py
@@ -7,55 +7,51 @@
class DropboxAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
-
- ADDON_SHORT_NAME = 'dropbox'
+ ADDON_SHORT_NAME = "dropbox"
ExternalAccountFactory = DropboxAccountFactory
Provider = Provider
def set_node_settings(self, settings):
super().set_node_settings(settings)
- settings.folder = 'foo'
+ settings.folder = "foo"
settings.save()
class MockFileMetadata:
-
- name = 'Prime_Numbers.txt'
- path_display = '/Homework/math/Prime_Numbers.txt'
+ name = "Prime_Numbers.txt"
+ path_display = "/Homework/math/Prime_Numbers.txt"
class MockFolderMetadata:
-
- name = 'math'
- path_display = '/Homework/math'
+ name = "math"
+ path_display = "/Homework/math"
class MockListFolderResult:
-
def __init__(self, has_more=False):
self.entries = [MockFileMetadata(), MockFolderMetadata()]
- self.cursor = 'ZtkX9_EHj3x7PMkVuFIhwKYXEpwpLwyxp9vMKomUhllil9q7eWiAu'
+ self.cursor = "ZtkX9_EHj3x7PMkVuFIhwKYXEpwpLwyxp9vMKomUhllil9q7eWiAu"
self.has_more = has_more
-class MockName:
- display_name = 'Rain Drop, Drop Box'
+class MockName:
+ display_name = "Rain Drop, Drop Box"
class MockFullAccount:
-
name = MockName()
class MockDropbox:
-
- def files_list_folder(self,
- path,
- recursive=False,
- include_media_info=False,
- include_deleted=False,
- include_has_explicit_shared_members=False):
- return MockListFolderResult()
+ def files_list_folder(
+ self,
+ path,
+ recursive=False,
+ include_media_info=False,
+ include_deleted=False,
+ include_has_explicit_shared_members=False,
+ ):
+ return MockListFolderResult()
def files_list_folder_continue(self, cursor):
return MockListFolderResult()
diff --git a/addons/dropbox/views.py b/addons/dropbox/views.py
index 6f37cbb8395..64127e676c4 100644
--- a/addons/dropbox/views.py
+++ b/addons/dropbox/views.py
@@ -1,57 +1,48 @@
"""Views fo the node settings page."""
+
from flask import request
import logging
from addons.dropbox.serializer import DropboxSerializer
from addons.base import generic_views
-from website.project.decorators import must_have_addon, must_be_addon_authorizer
+from website.project.decorators import (
+ must_have_addon,
+ must_be_addon_authorizer,
+)
logger = logging.getLogger(__name__)
debug = logger.debug
-SHORT_NAME = 'dropbox'
-FULL_NAME = 'Dropbox'
+SHORT_NAME = "dropbox"
+FULL_NAME = "Dropbox"
dropbox_account_list = generic_views.account_list(
- SHORT_NAME,
- DropboxSerializer
+ SHORT_NAME, DropboxSerializer
)
-dropbox_import_auth = generic_views.import_auth(
- SHORT_NAME,
- DropboxSerializer
-)
+dropbox_import_auth = generic_views.import_auth(SHORT_NAME, DropboxSerializer)
-@must_have_addon(SHORT_NAME, 'node')
+@must_have_addon(SHORT_NAME, "node")
@must_be_addon_authorizer(SHORT_NAME)
def dropbox_folder_list(node_addon, **kwargs):
- """ Returns all the subsequent folders under the folder id passed.
- """
- folder_id = request.args.get('folder_id')
+ """Returns all the subsequent folders under the folder id passed."""
+ folder_id = request.args.get("folder_id")
return node_addon.get_folders(folder_id=folder_id)
-dropbox_get_config = generic_views.get_config(
- SHORT_NAME,
- DropboxSerializer
-)
+dropbox_get_config = generic_views.get_config(SHORT_NAME, DropboxSerializer)
def _set_folder(node_addon, folder, auth):
- uid = folder['id']
+ uid = folder["id"]
node_addon.set_folder(uid, auth=auth)
node_addon.save()
dropbox_set_config = generic_views.set_config(
- SHORT_NAME,
- FULL_NAME,
- DropboxSerializer,
- _set_folder
+ SHORT_NAME, FULL_NAME, DropboxSerializer, _set_folder
)
-dropbox_deauthorize_node = generic_views.deauthorize_node(
- SHORT_NAME
-)
+dropbox_deauthorize_node = generic_views.deauthorize_node(SHORT_NAME)
diff --git a/addons/figshare/apps.py b/addons/figshare/apps.py
index 895941a60b3..b0c96f49f23 100644
--- a/addons/figshare/apps.py
+++ b/addons/figshare/apps.py
@@ -3,6 +3,7 @@
from website.util import rubeus
+
def figshare_root_folder(node_settings, auth, **kwargs):
"""Return the Rubeus/HGrid-formatted response for the root folder only.
@@ -14,26 +15,28 @@ def figshare_root_folder(node_settings, auth, **kwargs):
if not node_settings.has_auth or not node_settings.folder_id:
return None
node = node_settings.owner
- return [rubeus.build_addon_root(
- node_settings=node_settings,
- name=node_settings.fetch_folder_name(),
- permissions=auth,
- nodeUrl=node.url,
- nodeApiUrl=node.api_url,
- rootFolderType=node_settings.folder_path,
- private_key=kwargs.get('view_only', None),
- )]
+ return [
+ rubeus.build_addon_root(
+ node_settings=node_settings,
+ name=node_settings.fetch_folder_name(),
+ permissions=auth,
+ nodeUrl=node.url,
+ nodeApiUrl=node.api_url,
+ rootFolderType=node_settings.folder_path,
+ private_key=kwargs.get("view_only", None),
+ )
+ ]
-class FigshareAddonAppConfig(BaseAddonAppConfig):
+class FigshareAddonAppConfig(BaseAddonAppConfig):
default = True
- name = 'addons.figshare'
- label = 'addons_figshare'
- full_name = 'figshare'
- short_name = 'figshare'
- owners = ['user', 'node']
- configs = ['accounts', 'node']
- categories = ['storage']
+ name = "addons.figshare"
+ label = "addons_figshare"
+ full_name = "figshare"
+ short_name = "figshare"
+ owners = ["user", "node"]
+ configs = ["accounts", "node"]
+ categories = ["storage"]
has_hgrid_files = True
max_file_size = MAX_UPLOAD_SIZE
@@ -41,14 +44,14 @@ class FigshareAddonAppConfig(BaseAddonAppConfig):
def get_hgrid_data(self):
return figshare_root_folder
- FIGSHARE_FOLDER_CREATED = 'figshare_folder_created'
- FIGSHARE_FOLDER_SELECTED = 'figshare_folder_selected'
- FIGSHARE_CONTENT = 'figshare_content_unlinked'
- FIGSHARE_FILE_ADDED = 'figshare_file_added'
- FIGSHARE_FILE_REMOVED = 'figshare_file_removed'
- FIGSHARE_NODE_AUTHORIZED = 'figshare_node_authorized'
- FIGSHARE_NODE_DEAUTHORIZED = 'figshare_node_deauthorized'
- FIGSHARE_NODE_DEAUTHORIZED_NO_USER = 'figshare_node_deauthorized_no_user'
+ FIGSHARE_FOLDER_CREATED = "figshare_folder_created"
+ FIGSHARE_FOLDER_SELECTED = "figshare_folder_selected"
+ FIGSHARE_CONTENT = "figshare_content_unlinked"
+ FIGSHARE_FILE_ADDED = "figshare_file_added"
+ FIGSHARE_FILE_REMOVED = "figshare_file_removed"
+ FIGSHARE_NODE_AUTHORIZED = "figshare_node_authorized"
+ FIGSHARE_NODE_DEAUTHORIZED = "figshare_node_deauthorized"
+ FIGSHARE_NODE_DEAUTHORIZED_NO_USER = "figshare_node_deauthorized_no_user"
actions = (
FIGSHARE_FOLDER_CREATED,
@@ -58,17 +61,19 @@ def get_hgrid_data(self):
FIGSHARE_FILE_REMOVED,
FIGSHARE_NODE_AUTHORIZED,
FIGSHARE_NODE_DEAUTHORIZED,
- FIGSHARE_NODE_DEAUTHORIZED_NO_USER)
+ FIGSHARE_NODE_DEAUTHORIZED_NO_USER,
+ )
@property
def routes(self):
from . import routes
+
return [routes.api_routes]
@property
def user_settings(self):
- return self.get_model('UserSettings')
+ return self.get_model("UserSettings")
@property
def node_settings(self):
- return self.get_model('NodeSettings')
+ return self.get_model("NodeSettings")
diff --git a/addons/figshare/client.py b/addons/figshare/client.py
index a45242b4d44..fdfd78bb2f2 100644
--- a/addons/figshare/client.py
+++ b/addons/figshare/client.py
@@ -6,7 +6,6 @@
class FigshareClient(BaseClient):
-
def __init__(self, access_token):
self.access_token = access_token
@@ -20,128 +19,152 @@ def from_account(cls, account):
@property
def _default_headers(self):
if self.access_token:
- return {'Authorization': f'token {self.access_token}'}
+ return {"Authorization": f"token {self.access_token}"}
return {}
@property
def _default_params(self):
- return {'page_size': 100}
+ return {"page_size": 100}
def userinfo(self):
return self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account'),
- expects=(200, ),
- throws=HTTPError(403)
+ "GET",
+ self._build_url(settings.API_BASE_URL, "account"),
+ expects=(200,),
+ throws=HTTPError(403),
).json()
# PROJECT LEVEL API
def projects(self):
return self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account', 'projects')
+ "GET",
+ self._build_url(settings.API_BASE_URL, "account", "projects"),
).json()
def project(self, project_id):
if not project_id:
return
project = self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account', 'projects', project_id),
- expects=(200,)
+ "GET",
+ self._build_url(
+ settings.API_BASE_URL, "account", "projects", project_id
+ ),
+ expects=(200,),
).json()
if not project:
return
articles = self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account', 'projects', project_id, 'articles')
+ "GET",
+ self._build_url(
+ settings.API_BASE_URL,
+ "account",
+ "projects",
+ project_id,
+ "articles",
+ ),
).json()
- project['articles'] = []
+ project["articles"] = []
if articles:
- project['articles'] = []
+ project["articles"] = []
for article in articles:
- fetched = self.article(article['id'])
+ fetched = self.article(article["id"])
if fetched:
- project['articles'].append(fetched)
+ project["articles"].append(fetched)
return project
# ARTICLE LEVEL API
def articles(self, only_folders=False):
article_list = self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account', 'articles')
+ "GET",
+ self._build_url(settings.API_BASE_URL, "account", "articles"),
).json()
if only_folders:
- article_list = [x for x in article_list
- if x['defined_type'] in settings.FIGSHARE_FOLDER_TYPES]
- return [self.article(article['id']) for article in article_list]
+ article_list = [
+ x
+ for x in article_list
+ if x["defined_type"] in settings.FIGSHARE_FOLDER_TYPES
+ ]
+ return [self.article(article["id"]) for article in article_list]
def article_is_public(self, article_id):
- return self.article(article_id).get('is_public')
+ return self.article(article_id).get("is_public")
def project_is_public(self, project_id):
- return bool(self.project(project_id).get('date_published'))
+ return bool(self.project(project_id).get("date_published"))
def container_is_public(self, container_id, container_type):
- if container_type == 'project':
+ if container_type == "project":
return self.project_is_public(container_id)
elif container_id in settings.FIGSHARE_FOLDER_TYPES:
return self.article_is_public(container_id)
def article(self, article_id):
return self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account', 'articles', article_id),
- expects=(200, )
+ "GET",
+ self._build_url(
+ settings.API_BASE_URL, "account", "articles", article_id
+ ),
+ expects=(200,),
).json()
# OTHER HELPERS
def get_folders(self):
- """ Return a list containing both projects and folder-like articles. """
+ """Return a list containing both projects and folder-like articles."""
projects = self.projects()
project_list = [
{
- 'name': project['title'],
- 'path': 'project',
- 'id': str(project['id']),
- 'kind': 'folder',
- 'permissions': {'view': True},
- 'addon': 'figshare',
- 'hasChildren': False
- } for project in projects
+ "name": project["title"],
+ "path": "project",
+ "id": str(project["id"]),
+ "kind": "folder",
+ "permissions": {"view": True},
+ "addon": "figshare",
+ "hasChildren": False,
+ }
+ for project in projects
]
article_list = [
{
- 'name': (article['title'] or 'untitled article'),
- 'path': settings.FIGSHARE_IDS_TO_TYPES[article['defined_type']],
- 'id': str(article['id']),
- 'kind': 'folder',
- 'permissions': {'view': True},
- 'addon': 'figshare',
- 'hasChildren': False
- } for article in self.articles(only_folders=True)
+ "name": (article["title"] or "untitled article"),
+ "path": settings.FIGSHARE_IDS_TO_TYPES[
+ article["defined_type"]
+ ],
+ "id": str(article["id"]),
+ "kind": "folder",
+ "permissions": {"view": True},
+ "addon": "figshare",
+ "hasChildren": False,
+ }
+ for article in self.articles(only_folders=True)
]
return project_list + article_list
def get_linked_folder_info(self, _id):
- """ Returns info about a linkable object -- 'project', 'dataset', or 'fileset' """
+ """Returns info about a linkable object -- 'project', 'dataset', or 'fileset'"""
ret = {}
try:
folder = self._make_request(
- 'GET',
- self._build_url(settings.API_BASE_URL, 'account', 'projects', _id),
- expects=(200, ),
- throws=HTTPError(404)
+ "GET",
+ self._build_url(
+ settings.API_BASE_URL, "account", "projects", _id
+ ),
+ expects=(200,),
+ throws=HTTPError(404),
).json()
- ret['path'] = 'project'
+ ret["path"] = "project"
except HTTPError:
folder = self.article(_id)
- if folder.get('defined_type') not in settings.FIGSHARE_FOLDER_TYPES:
+ if (
+ folder.get("defined_type")
+ not in settings.FIGSHARE_FOLDER_TYPES
+ ):
raise
- ret['path'] = settings.FIGSHARE_IDS_TO_TYPES[folder.get('defined_type')]
- ret['name'] = folder['title'] or 'untitled article'
- ret['id'] = str(_id)
+ ret["path"] = settings.FIGSHARE_IDS_TO_TYPES[
+ folder.get("defined_type")
+ ]
+ ret["name"] = folder["title"] or "untitled article"
+ ret["id"] = str(_id)
return ret
diff --git a/addons/figshare/messages.py b/addons/figshare/messages.py
index 96eca27242e..78f4fd89652 100644
--- a/addons/figshare/messages.py
+++ b/addons/figshare/messages.py
@@ -1,11 +1,13 @@
# MODEL MESSAGES :model.py
-BEFORE_PAGE_LOAD_PRIVATE_NODE_MIXED_FS = 'Warning: This OSF {category} is private but figshare project {project_id} may contain some public files or filesets.'
+BEFORE_PAGE_LOAD_PRIVATE_NODE_MIXED_FS = "Warning: This OSF {category} is private but figshare project {project_id} may contain some public files or filesets."
-BEFORE_PAGE_LOAD_PUBLIC_NODE_MIXED_FS = 'Warning: This OSF {category} is public but figshare project {project_id} may contain some private files or filesets.'
+BEFORE_PAGE_LOAD_PUBLIC_NODE_MIXED_FS = "Warning: This OSF {category} is public but figshare project {project_id} may contain some private files or filesets."
-BEFORE_PAGE_LOAD_PERM_MISMATCH = 'Warning: This OSF {category} is {node_perm}, but the figshare {folder_type} {figshare_id} is {figshare_perm}. '
+BEFORE_PAGE_LOAD_PERM_MISMATCH = "Warning: This OSF {category} is {node_perm}, but the figshare {folder_type} {figshare_id} is {figshare_perm}. "
-BEFORE_PAGE_LOAD_PUBLIC_NODE_PRIVATE_FS = 'Users can view the contents of this private figshare {folder_type}. '
+BEFORE_PAGE_LOAD_PUBLIC_NODE_PRIVATE_FS = (
+ "Users can view the contents of this private figshare {folder_type}. "
+)
# END MODEL MESSAGES
# MFR MESSAGES :views/crud.py
@@ -30,10 +32,12 @@
#
# '''
-OAUTH_INVALID = 'Your OAuth key for figshare is no longer valid. Please re-authenticate. '
+OAUTH_INVALID = (
+ "Your OAuth key for figshare is no longer valid. Please re-authenticate. "
+)
-FIGSHARE_INTERNAL_SERVER_ERROR = 'Figshare is experiencing technical problems when connecting to the OSF. Please wait while they resolve the problem or contact them at https://support.figshare.com.'
+FIGSHARE_INTERNAL_SERVER_ERROR = "Figshare is experiencing technical problems when connecting to the OSF. Please wait while they resolve the problem or contact them at https://support.figshare.com."
-FIGSHARE_UNSPECIFIED_ERROR = 'Figshare was contacted and returned with the following error message: {error_message}.'
+FIGSHARE_UNSPECIFIED_ERROR = "Figshare was contacted and returned with the following error message: {error_message}."
# END MFR MESSAGES
diff --git a/addons/figshare/models.py b/addons/figshare/models.py
index 3b758787dce..726d27922b1 100644
--- a/addons/figshare/models.py
+++ b/addons/figshare/models.py
@@ -2,7 +2,8 @@
from addons.base.models import (
BaseOAuthNodeSettings,
BaseOAuthUserSettings,
- BaseStorageAddon,)
+ BaseStorageAddon,
+)
from django.db import models
from framework.auth import Auth
from framework.exceptions import HTTPError
@@ -16,7 +17,7 @@
class FigshareFileNode(BaseFileNode):
- _provider = 'figshare'
+ _provider = "figshare"
class FigshareFolder(FigshareFileNode, Folder):
@@ -24,7 +25,7 @@ class FigshareFolder(FigshareFileNode, Folder):
class FigshareFile(FigshareFileNode, File):
- version_identifier = 'ref'
+ version_identifier = "ref"
@property
def _hashes(self):
@@ -39,8 +40,10 @@ def update(self, revision, data, user=None, save=True):
version = super().update(None, data, user=user, save=save)
# Draft files are not renderable
- if data['extra']['status'] == 'drafts':
- return (version, """
+ if data["extra"]["status"] == "drafts":
+ return (
+ version,
+ """