From f23c6b5ebca5bd0747a74c2f9fd8d8f3421f37c2 Mon Sep 17 00:00:00 2001 From: Josh Bird Date: Tue, 14 Aug 2018 10:07:08 -0400 Subject: [PATCH 1/6] Removes v0, redirect downloads Removes all v0 files. These endpoints now return 410 (Gone), except for the download action, which rewrites the URL to return a redirect to the v1 API endpoint. We may consider writing an nginx directive to handle this, as it would be far faster. --- waterbutler/server/api/v0/__init__.py | 80 +++++++++++-- waterbutler/server/api/v0/copy.py | 52 --------- waterbutler/server/api/v0/core.py | 149 ------------------------- waterbutler/server/api/v0/crud.py | 135 ---------------------- waterbutler/server/api/v0/metadata.py | 19 ---- waterbutler/server/api/v0/move.py | 53 --------- waterbutler/server/api/v0/revisions.py | 19 ---- waterbutler/server/api/v0/zip.py | 24 ---- 8 files changed, 68 insertions(+), 463 deletions(-) delete mode 100644 waterbutler/server/api/v0/copy.py delete mode 100644 waterbutler/server/api/v0/core.py delete mode 100644 waterbutler/server/api/v0/crud.py delete mode 100644 waterbutler/server/api/v0/metadata.py delete mode 100644 waterbutler/server/api/v0/move.py delete mode 100644 waterbutler/server/api/v0/revisions.py delete mode 100644 waterbutler/server/api/v0/zip.py diff --git a/waterbutler/server/api/v0/__init__.py b/waterbutler/server/api/v0/__init__.py index a2538653a..d79148471 100644 --- a/waterbutler/server/api/v0/__init__.py +++ b/waterbutler/server/api/v0/__init__.py @@ -1,17 +1,73 @@ -from waterbutler.server.api.v0 import zip -from waterbutler.server.api.v0 import copy -from waterbutler.server.api.v0 import crud -from waterbutler.server.api.v0 import move -from waterbutler.server.api.v0 import metadata -from waterbutler.server.api.v0 import revisions + +import os +import socket +import asyncio +from http import HTTPStatus +import logging + +import tornado.web +import tornado.gen +import tornado.platform.asyncio + +from waterbutler.core import mime_types +from waterbutler.server import utils +from waterbutler.server.api.v0 import core +from waterbutler.core.streams import RequestStreamReader + + +logger = logging.getLogger(__name__) + + +def list_or_value(value): + assert isinstance(value, list) + if len(value) == 0: + return None + if len(value) == 1: + # Remove leading slashes as they break things + return value[0].decode('utf-8') + return [item.decode('utf-8') for item in value] + + +@tornado.web.stream_request_body +class DownloadRedirectHandler(core.BaseProviderHandler): + + async def prepare(self): + + # The only requests we redirect are downloads.
+ if self.request.method != 'GET': + raise tornado.web.HTTPError(status_code=410) + + # Get the query parameters so we can build a redirect url + self.arguments = { + key: list_or_value(value) + for key, value in self.request.query_arguments.items() + } + + resource = self.arguments['nid'] + provider = self.arguments['provider'] + path = self.arguments['path'] + direct = TRUTH_MAP[self.arguments.get('accept_url', 'true').lower()] + version = self.arguments.get('version', self.arguments.get('revision', None)) + + v1_url = '/v1/resources/{resource}/providers/{provider}{path}?{direct}{revision}'.format( + resource=resource, + provider=provider, + path=path, + direct='direct=&' if direct else '', + version='version={}&'.format(self.arguments['version']) if version else '' + ) + + logger.info('Redirecting a v0 download request to v1') + + self.redirect(v1_url, permanent=True) PREFIX = '' HANDLERS = [ - (r'/ops/copy', copy.CopyHandler), - (r'/ops/move', move.MoveHandler), - (r'/zip', zip.ZipHandler), - (r'/file', crud.CRUDHandler), - (r'/data', metadata.MetadataHandler), - (r'/revisions', revisions.RevisionHandler), + (r'/file', DownloadRedirectHandler) ] + +TRUTH_MAP = { + 'true': True, + 'false': False, +} diff --git a/waterbutler/server/api/v0/copy.py b/waterbutler/server/api/v0/copy.py deleted file mode 100644 index c75bb5e13..000000000 --- a/waterbutler/server/api/v0/copy.py +++ /dev/null @@ -1,52 +0,0 @@ -import time - -from waterbutler import tasks -from waterbutler.server.api.v0 import core -from waterbutler.core import remote_logging - - -class CopyHandler(core.BaseCrossProviderHandler): - JSON_REQUIRED = True - ACTION_MAP = { - 'POST': 'copy' - } - - async def post(self): - if not self.source_provider.can_intra_copy(self.destination_provider, self.json['source']['path']): - result = await tasks.copy.adelay({ - 'nid': self.json['source']['nid'], - 'path': self.json['source']['path'], - 'provider': self.source_provider.serialized() - }, { - 'nid': self.json['destination']['nid'], - 'path': self.json['destination']['path'], - 'provider': self.destination_provider.serialized() - }, - rename=self.json.get('rename'), - conflict=self.json.get('conflict', 'replace'), - start_time=time.time(), - request=remote_logging._serialize_request(self.request), - ) - - metadata, created = await tasks.wait_on_celery(result) - else: - metadata, created = ( - await tasks.backgrounded( - self.source_provider.copy, - self.destination_provider, - self.json['source']['path'], - self.json['destination']['path'], - rename=self.json.get('rename'), - conflict=self.json.get('conflict', 'replace'), - ) - ) - - if created: - self.set_status(201) - else: - self.set_status(200) - - self.write(metadata.serialized()) - - if self.source_provider.can_intra_copy(self.destination_provider, self.json['source']['path']): - self._send_hook('copy', metadata) diff --git a/waterbutler/server/api/v0/core.py b/waterbutler/server/api/v0/core.py deleted file mode 100644 index 3c0f88b49..000000000 --- a/waterbutler/server/api/v0/core.py +++ /dev/null @@ -1,149 +0,0 @@ -import json -import logging - -import tornado.web -import tornado.gen -import tornado.iostream -from raven.contrib.tornado import SentryMixin - -from waterbutler import tasks -from waterbutler.core import utils -from waterbutler.core import signing -from waterbutler.core import exceptions -from waterbutler.server import settings -from waterbutler.core import remote_logging -from waterbutler.server.auth import AuthHandler -from waterbutler.core.log_payload import 
LogPayload -from waterbutler.server import utils as server_utils - - -def list_or_value(value): - assert isinstance(value, list) - if len(value) == 0: - return None - if len(value) == 1: - # Remove leading slashes as they break things - return value[0].decode('utf-8') - return [item.decode('utf-8') for item in value] - - -logger = logging.getLogger(__name__) -auth_handler = AuthHandler(settings.AUTH_HANDLERS) -signer = signing.Signer(settings.HMAC_SECRET, settings.HMAC_ALGORITHM) - - -class BaseHandler(server_utils.CORsMixin, server_utils.UtilMixin, tornado.web.RequestHandler, SentryMixin): - """Base Handler to inherit from when defining a new view. - Handles CORs headers, additional status codes, and translating - :class:`waterbutler.core.exceptions.ProviderError`s into http responses - - .. note:: - For IE compatability passing a ?method= will cause that request, regardless of the - actual method, to be interpreted as the specified method. - """ - - ACTION_MAP = {} # type: dict - - def write_error(self, status_code, exc_info): - self.captureException(exc_info) - etype, exc, _ = exc_info - - if issubclass(etype, exceptions.PluginError): - self.set_status(int(exc.code)) - if exc.data: - self.finish(exc.data) - else: - self.finish({ - 'code': exc.code, - 'message': exc.message - }) - - elif issubclass(etype, tasks.WaitTimeOutError): - # TODO - self.set_status(202) - else: - self.finish({ - 'code': status_code, - 'message': self._reason, - }) - - -class BaseProviderHandler(BaseHandler): - - async def prepare(self): - self.arguments = { - key: list_or_value(value) - for key, value in self.request.query_arguments.items() - } - try: - self.arguments['action'] = self.ACTION_MAP[self.request.method] - except KeyError: - return - - self.payload = await auth_handler.fetch(self.request, self.arguments) - - self.provider = utils.make_provider( - self.arguments['provider'], - self.payload['auth'], - self.payload['credentials'], - self.payload['settings'], - ) - - self.path = await self.provider.validate_path(**self.arguments) - self.arguments['path'] = self.path # TODO Not this - - def _send_hook(self, action, metadata=None, path=None): - source = LogPayload(self.arguments['nid'], self.provider, metadata=metadata, path=path) - remote_logging.log_file_action(action, source=source, api_version='v0', - request=remote_logging._serialize_request(self.request), - bytes_downloaded=self.bytes_downloaded, - bytes_uploaded=self.bytes_uploaded) - - -class BaseCrossProviderHandler(BaseHandler): - JSON_REQUIRED = False - - async def prepare(self): - try: - self.action = self.ACTION_MAP[self.request.method] - except KeyError: - return - - self.source_provider = await self.make_provider(prefix='from', **self.json['source']) - self.destination_provider = await self.make_provider(prefix='to', **self.json['destination']) - - self.json['source']['path'] = await self.source_provider.validate_path(**self.json['source']) - self.json['destination']['path'] = await self.destination_provider.validate_path(**self.json['destination']) - - async def make_provider(self, provider, prefix='', **kwargs): - payload = await auth_handler.fetch( - self.request, - dict(kwargs, provider=provider, action=self.action + prefix) - ) - self.auth = payload - return utils.make_provider(provider, **payload) - - @property - def json(self): - try: - return self._json - except AttributeError: - pass - try: - self._json = json.loads(self.request.body.decode('utf-8')) - except ValueError: - if self.JSON_REQUIRED: - raise Exception # TODO - self._json 
= None - - return self._json - - def _send_hook(self, action, metadata): - source = LogPayload(self.json['source']['nid'], self.source_provider, - path=self.json['source']['path']) - destination = LogPayload(self.json['destination']['nid'], self.destination_provider, - metadata=metadata) - remote_logging.log_file_action(action, source=source, destination=destination, api_version='v0', - request=remote_logging._serialize_request(self.request), - bytes_downloaded=self.bytes_downloaded, - bytes_uploaded=self.bytes_uploaded) diff --git a/waterbutler/server/api/v0/crud.py b/waterbutler/server/api/v0/crud.py deleted file mode 100644 index c1a52720b..000000000 --- a/waterbutler/server/api/v0/crud.py +++ /dev/null @@ -1,135 +0,0 @@ -import os -import socket -import asyncio -from http import HTTPStatus - -import tornado.web -import tornado.gen -import tornado.platform.asyncio - -from waterbutler.core import mime_types -from waterbutler.server import utils -from waterbutler.server.api.v0 import core -from waterbutler.core.streams import RequestStreamReader - -TRUTH_MAP = { - 'true': True, - 'false': False, -} - - -@tornado.web.stream_request_body -class CRUDHandler(core.BaseProviderHandler): - - ACTION_MAP = { - 'GET': 'download', - 'PUT': 'upload', - 'DELETE': 'delete', - 'POST': 'create_folder', - } - STREAM_METHODS = ('PUT', ) - - async def prepare(self): - await super().prepare() - await self.prepare_stream() - - async def prepare_stream(self): - if self.request.method in self.STREAM_METHODS: - self.rsock, self.wsock = socket.socketpair() - - self.reader, _ = await asyncio.open_unix_connection(sock=self.rsock) - _, self.writer = await asyncio.open_unix_connection(sock=self.wsock) - - self.stream = RequestStreamReader(self.request, self.reader) - - self.uploader = asyncio.ensure_future(self.provider.upload(self.stream, - **self.arguments)) - else: - self.stream = None - - async def data_received(self, chunk): - """Note: Only called during uploads.""" - self.bytes_uploaded += len(chunk) - if self.stream: - self.writer.write(chunk) - await self.writer.drain() - - async def get(self): - """Download a file.""" - try: - self.arguments['accept_url'] = TRUTH_MAP[self.arguments.get('accept_url', 'true').lower()] - except KeyError: - raise tornado.web.HTTPError(status_code=400) - - if 'Range' in self.request.headers: - request_range = utils.parse_request_range(self.request.headers['Range']) - else: - request_range = None - - result = await self.provider.download(range=request_range, **self.arguments) - - if isinstance(result, str): - self.redirect(result) - self._send_hook('download_file', path=self.path) - return - - if getattr(result, 'partial', None): - # Use getattr here as not all stream may have a partial attribute - # Plus it fixes tests - self.set_status(206) - self.set_header('Content-Range', result.content_range) - - if result.content_type is not None: - self.set_header('Content-Type', result.content_type) - - if result.size is not None: - self.set_header('Content-Length', str(result.size)) - - # Build `Content-Disposition` header from `displayName` override, - # headers of provider response, or file path, whichever is truthy first - name = self.arguments.get('displayName') or getattr(result, 'name', None) or self.path.name - self.set_header('Content-Disposition', utils.make_disposition(name)) - - _, ext = os.path.splitext(name) - # If the file extention is in mime_types - # override the content type to fix issues with safari shoving in new file extensions - if ext in mime_types: - 
self.set_header('Content-Type', mime_types[ext]) - - await self.write_stream(result) - self._send_hook('download_file', path=self.path) - - async def post(self): - """Create a folder""" - metadata = await self.provider.create_folder(**self.arguments) - - self.set_status(201) - self.write(metadata.serialized()) - - self._send_hook('create_folder', metadata) - - async def put(self): - """Upload a file.""" - self.writer.write_eof() - - metadata, created = await self.uploader - - if created: - self.set_status(201) - self.write(metadata.serialized()) - - self.writer.close() - self.wsock.close() - - self._send_hook( - 'create' if created else 'update', - metadata, - ) - - async def delete(self): - """Delete a file.""" - - await self.provider.delete(**self.arguments) - self.set_status(int(HTTPStatus.NO_CONTENT)) - - self._send_hook('delete', path=self.path) diff --git a/waterbutler/server/api/v0/metadata.py b/waterbutler/server/api/v0/metadata.py deleted file mode 100644 index 4afa5a747..000000000 --- a/waterbutler/server/api/v0/metadata.py +++ /dev/null @@ -1,19 +0,0 @@ -from waterbutler.server.api.v0 import core - - -class MetadataHandler(core.BaseProviderHandler): - - ACTION_MAP = { - 'GET': 'metadata', - } - - async def get(self): - """List information about a file or folder""" - result = await self.provider.metadata(**self.arguments) - - if isinstance(result, list): - result = [m.serialized() for m in result] - else: - result = result.serialized() - - self.write({'data': result}) diff --git a/waterbutler/server/api/v0/move.py b/waterbutler/server/api/v0/move.py deleted file mode 100644 index 6955dc84f..000000000 --- a/waterbutler/server/api/v0/move.py +++ /dev/null @@ -1,53 +0,0 @@ -import time - -from waterbutler import tasks -from waterbutler.server.api.v0 import core -from waterbutler.core import remote_logging - - -class MoveHandler(core.BaseCrossProviderHandler): - JSON_REQUIRED = True - ACTION_MAP = { - 'POST': 'move' - } - - async def post(self): - if not self.source_provider.can_intra_move(self.destination_provider, self.json['source']['path']): - resp = await tasks.move.adelay({ - 'nid': self.json['source']['nid'], - 'path': self.json['source']['path'], - 'provider': self.source_provider.serialized() - }, { - 'nid': self.json['destination']['nid'], - 'path': self.json['destination']['path'], - 'provider': self.destination_provider.serialized() - }, - rename=self.json.get('rename'), - conflict=self.json.get('conflict', 'replace'), - start_time=time.time(), - request=remote_logging._serialize_request(self.request), - ) - - metadata, created = await tasks.wait_on_celery(resp) - - else: - metadata, created = ( - await tasks.backgrounded( - self.source_provider.move, - self.destination_provider, - self.json['source']['path'], - self.json['destination']['path'], - rename=self.json.get('rename'), - conflict=self.json.get('conflict', 'replace'), - ) - ) - - if created: - self.set_status(201) - else: - self.set_status(200) - - self.write(metadata.serialized()) - - if self.source_provider.can_intra_move(self.destination_provider, self.json['source']['path']): - self._send_hook('move', metadata) diff --git a/waterbutler/server/api/v0/revisions.py b/waterbutler/server/api/v0/revisions.py deleted file mode 100644 index 3ea9923c4..000000000 --- a/waterbutler/server/api/v0/revisions.py +++ /dev/null @@ -1,19 +0,0 @@ -import asyncio - -from waterbutler.server.api.v0 import core - - -class RevisionHandler(core.BaseProviderHandler): - - ACTION_MAP = { - 'GET': 'revisions', - } - - async def 
get(self): - """List revisions of a file""" - result = self.provider.revisions(**self.arguments) - - if asyncio.iscoroutine(result): - result = await result - - self.write({'data': [r.serialized() for r in result]}) diff --git a/waterbutler/server/api/v0/zip.py b/waterbutler/server/api/v0/zip.py deleted file mode 100644 index 9fc2377be..000000000 --- a/waterbutler/server/api/v0/zip.py +++ /dev/null @@ -1,24 +0,0 @@ -from waterbutler.server import utils -from waterbutler.server.api.v0 import core - - -class ZipHandler(core.BaseProviderHandler): - - ACTION_MAP = { - 'GET': 'download', - } - - async def get(self): - """Download as a Zip archive.""" - - zipfile_name = self.path.name or '{}-archive'.format(self.provider.NAME) - self.set_header('Content-Type', 'application/zip') - self.set_header( - 'Content-Disposition', - utils.make_disposition(zipfile_name + '.zip') - ) - - result = await self.provider.zip(**self.arguments) - - await self.write_stream(result) - self._send_hook('download_zip', path=self.path) From 14de20bc5af5360a413f3ba599f6230c64a7a690 Mon Sep 17 00:00:00 2001 From: Josh Bird Date: Tue, 14 Aug 2018 10:46:10 -0400 Subject: [PATCH 2/6] Fix missing slash, unused imports Removes some unused imports and adds in a slash that was missing from the URL being redirected to. --- waterbutler/server/api/v0/__init__.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/waterbutler/server/api/v0/__init__.py b/waterbutler/server/api/v0/__init__.py index d79148471..c42603352 100644 --- a/waterbutler/server/api/v0/__init__.py +++ b/waterbutler/server/api/v0/__init__.py @@ -1,18 +1,12 @@ - -import os -import socket -import asyncio from http import HTTPStatus import logging +from raven.contrib.tornado import SentryMixin import tornado.web import tornado.gen import tornado.platform.asyncio -from waterbutler.core import mime_types -from waterbutler.server import utils -from waterbutler.server.api.v0 import core -from waterbutler.core.streams import RequestStreamReader +from waterbutler.server.utils import CORsMixin logger = logging.getLogger(__name__) @@ -29,7 +23,7 @@ def list_or_value(value): @tornado.web.stream_request_body -class DownloadRedirectHandler(core.BaseProviderHandler): +class DownloadRedirectHandler(tornado.web.RequestHandler, CORsMixin, SentryMixin): async def prepare(self): @@ -49,12 +43,12 @@ async def prepare(self): direct = TRUTH_MAP[self.arguments.get('accept_url', 'true').lower()] version = self.arguments.get('version', self.arguments.get('revision', None)) - v1_url = '/v1/resources/{resource}/providers/{provider}{path}?{direct}{revision}'.format( + v1_url = '/v1/resources/{resource}/providers/{provider}/{path}?{direct}{version}'.format( resource=resource, provider=provider, path=path, direct='direct=&' if direct else '', - version='version={}&'.format(self.arguments['version']) if version else '' + version='version={}&'.format(version) if version else '' ) logger.info('Redirecting a v0 download request to v1') From 9fe375d9b195288ecb72281ca2eafeb4638d3a80 Mon Sep 17 00:00:00 2001 From: Josh Bird Date: Tue, 14 Aug 2018 13:15:24 -0400 Subject: [PATCH 3/6] Remove unused import --- waterbutler/server/api/v0/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/waterbutler/server/api/v0/__init__.py b/waterbutler/server/api/v0/__init__.py index c42603352..04e7c37bf 100644 --- a/waterbutler/server/api/v0/__init__.py +++ b/waterbutler/server/api/v0/__init__.py @@ -1,4 +1,3 @@ -from http import HTTPStatus import logging from
raven.contrib.tornado import SentryMixin From 4df6c22c8f125c098be814c2b82177fc80acdeaa Mon Sep 17 00:00:00 2001 From: Josh Bird Date: Thu, 16 Aug 2018 11:45:01 -0400 Subject: [PATCH 4/6] Remove AuthHandler fetch Only v0 uses the fetch method on the auth handler. v1 uses the get method. This removes the fetch method and the tests for it. --- tests/server/api/v0/__init__.py | 0 tests/server/api/v0/test_copy.py | 101 ----------- tests/server/api/v0/test_crud.py | 245 -------------------------- tests/server/api/v0/test_move.py | 101 ----------- tests/server/api/v0/test_revisions.py | 53 ------ tests/server/api/v0/test_status.py | 22 --- tests/server/api/v0/test_zip.py | 34 ---- tests/utils.py | 83 --------- waterbutler/auth/osf/handler.py | 24 --- waterbutler/core/auth.py | 4 - waterbutler/server/auth.py | 7 - 11 files changed, 674 deletions(-) delete mode 100644 tests/server/api/v0/__init__.py delete mode 100644 tests/server/api/v0/test_copy.py delete mode 100644 tests/server/api/v0/test_crud.py delete mode 100644 tests/server/api/v0/test_move.py delete mode 100644 tests/server/api/v0/test_revisions.py delete mode 100644 tests/server/api/v0/test_status.py delete mode 100644 tests/server/api/v0/test_zip.py diff --git a/tests/server/api/v0/__init__.py b/tests/server/api/v0/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/server/api/v0/test_copy.py b/tests/server/api/v0/test_copy.py deleted file mode 100644 index a042234e7..000000000 --- a/tests/server/api/v0/test_copy.py +++ /dev/null @@ -1,101 +0,0 @@ -import json - -from tornado import testing - -from waterbutler.core.path import WaterButlerPath - -from tests import utils - - -class TestCopyHandler(utils.MultiProviderHandlerTestCase): - HOOK_PATH = 'waterbutler.server.api.v0.copy.CopyHandler._send_hook' - - @testing.gen_test - def test_calls_copy(self): - self.source_provider.copy = utils.MockCoroutine( - return_value=(utils.MockFileMetadata(), False) - ) - - yield self.http_client.fetch( - self.get_url('/ops/copy'), - method='POST', - body=json.dumps(self.payload()) - ) - - assert self.source_provider.copy.called - self.source_provider.copy.assert_called_once_with( - self.destination_provider, - WaterButlerPath(self.payload()['source']['path']), - WaterButlerPath(self.payload()['destination']['path']), - rename=None, - conflict='replace' - ) - - @testing.gen_test - def test_conflict(self): - self.source_provider.copy = utils.MockCoroutine( - return_value=(utils.MockFileMetadata(), True) - ) - - payload = self.payload() - payload['conflict'] = 'keep' - - resp = yield self.http_client.fetch( - self.get_url('/ops/copy'), - method='POST', - body=json.dumps(payload) - ) - - assert resp.code == 201 - assert self.source_provider.copy.called - self.source_provider.copy.assert_called_once_with( - self.destination_provider, - WaterButlerPath(payload['source']['path']), - WaterButlerPath(payload['destination']['path']), - rename=None, - conflict='keep' - ) - - @testing.gen_test - def test_rename(self): - metadata = utils.MockFileMetadata() - self.source_provider.copy = utils.MockCoroutine( - return_value=(metadata, False) - ) - - payload = self.payload() - payload['rename'] = 'MyCoolFileGuys' - - resp = yield self.http_client.fetch( - self.get_url('/ops/copy'), - method='POST', - body=json.dumps(payload) - ) - - assert resp.code == 200 - assert json.loads(resp.body.decode()) == metadata.serialized() - assert self.source_provider.copy.called - self.source_provider.copy.assert_called_once_with( - 
self.destination_provider, - WaterButlerPath(payload['source']['path']), - WaterButlerPath(payload['destination']['path']), - rename='MyCoolFileGuys', - conflict='replace' - ) - - @testing.gen_test - def test_intra_makes_callback(self): - self.source_provider.copy = utils.MockCoroutine( - return_value=(utils.MockFileMetadata(), False) - ) - - yield self.http_client.fetch( - self.get_url('/ops/copy'), - method='POST', - body=json.dumps(self.payload()) - ) - - self.mock_send_hook.assert_called_once_with( - 'copy', - utils.MockFileMetadata() - ) diff --git a/tests/server/api/v0/test_crud.py b/tests/server/api/v0/test_crud.py deleted file mode 100644 index 15d92d44c..000000000 --- a/tests/server/api/v0/test_crud.py +++ /dev/null @@ -1,245 +0,0 @@ -import pytest - -import json -import asyncio - -from tornado import testing -from tornado import httpclient - -from waterbutler.core import streams -from waterbutler.core import exceptions - -from tests import utils - - -class TestCrudHandler(utils.HandlerTestCase): - - HOOK_PATH = 'waterbutler.server.api.v0.crud.CRUDHandler._send_hook' - - @testing.gen_test - def test_download_redirect(self): - redirect_url = 'http://queen.com/freddie.png' - - self.mock_provider.download = utils.MockCoroutine(return_value=redirect_url) - - with pytest.raises(httpclient.HTTPError) as exc: - yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png'), - follow_redirects=False, - ) - assert exc.value.code == 302 - assert exc.value.response.headers.get('Location') == redirect_url - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - - @testing.gen_test - def test_download_stream(self): - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = 'foo' - stream.content_type = 'application/octet-stream' - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png'), - ) - assert resp.body == data - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - - @testing.gen_test - def test_download_stream_range(self): - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = 'foo' - stream.partial = True - stream.content_range = '0-{}/{}'.format(len(data) - 1, len(data)) - stream.content_type = 'application/octet-stream' - - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png'), - headers={'Range': 'bytes=0-'} - ) - - assert resp.code == 206 - assert resp.body == data - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - assert kwargs.get('range') == (0, None) - - @testing.gen_test - def test_download_content_type_switches(self): - """waterbutler.core.mime_types contains content type - overrides. 
- """ - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = None - stream.content_type = 'application/octet-stream' - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.md'), - ) - assert resp.body == data - assert resp.headers['Content-Type'] == 'text/x-markdown' - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - - @testing.gen_test - def test_download_content_type_does_not_switch(self): - """mime_types should not override file extension not in the dict - """ - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = None - stream.content_type = 'application/octet-stream' - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png'), - ) - assert resp.body == data - assert resp.headers['Content-Type'] == 'application/octet-stream' - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - - @testing.gen_test - def test_download_accept_url_false(self): - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = 'foo' - stream.content_type = 'application/octet-stream' - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png&accept_url=false'), - ) - assert resp.body == data - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - assert kwargs.get('accept_url') is False - - @testing.gen_test - def test_download_accept_url_default(self): - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = 'foo' - stream.content_type = 'application/octet-stream' - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png'), - ) - assert resp.body == data - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - assert kwargs.get('accept_url') is True - - @testing.gen_test - def test_download_accept_url_true(self): - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.name = 'foo' - stream.content_type = 'application/octet-stream' - self.mock_provider.download = utils.MockCoroutine(return_value=stream) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png&accept_url=true'), - ) - assert resp.body == data - calls = self.mock_provider.download.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'download' - assert kwargs.get('accept_url') is True - - @testing.gen_test - def test_download_accept_url_invalid(self): - self.mock_provider.download = utils.MockCoroutine() - - with pytest.raises(httpclient.HTTPError) as exc: - yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png&accept_url=teapot'), - ) - assert exc.value.code == 400 - assert self.mock_provider.download.called is False - - @testing.gen_test - def 
test_download_not_found(self): - self.mock_provider.download = utils.MockCoroutine(side_effect=exceptions.NotFoundError('/freddie.png')) - - with pytest.raises(httpclient.HTTPError) as exc: - yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/freddie.png'), - ) - - assert exc.value.code == 404 - - @testing.gen_test - def test_upload(self): - data = b'stone cold crazy' - expected = utils.MockFileMetadata() - self.mock_provider.upload = utils.MockCoroutine(return_value=(expected, True)) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/roger.png'), - method='PUT', - body=data, - ) - - calls = self.mock_provider.upload.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert isinstance(args[0], streams.RequestStreamReader) - streamed = yield args[0].read() - assert streamed == data - assert kwargs['action'] == 'upload' - assert str(kwargs['path']) == '/roger.png' - assert expected.serialized() == json.loads(resp.body.decode()) - - @testing.gen_test - def test_delete(self): - self.mock_provider.delete = utils.MockCoroutine() - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/john.png'), - method='DELETE', - ) - - calls = self.mock_provider.delete.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'delete' - assert resp.code == 204 - - @testing.gen_test - def test_create_folder(self): - self.mock_provider.create_folder = utils.MockCoroutine(return_value=utils.MockFolderMetadata()) - - resp = yield self.http_client.fetch( - self.get_url('/file?provider=queenhub&path=/folder/'), - method='POST', - body='' - ) - calls = self.mock_provider.create_folder.call_args_list - assert len(calls) == 1 - args, kwargs = calls[0] - assert kwargs.get('action') == 'create_folder' - assert resp.code == 201 diff --git a/tests/server/api/v0/test_move.py b/tests/server/api/v0/test_move.py deleted file mode 100644 index 812766373..000000000 --- a/tests/server/api/v0/test_move.py +++ /dev/null @@ -1,101 +0,0 @@ -import json - -from tornado import testing - -from waterbutler.core.path import WaterButlerPath - -from tests import utils - - -class TestMoveHandler(utils.MultiProviderHandlerTestCase): - HOOK_PATH = 'waterbutler.server.api.v0.move.MoveHandler._send_hook' - - @testing.gen_test - def test_calls_move(self): - self.source_provider.move = utils.MockCoroutine( - return_value=(utils.MockFileMetadata(), False) - ) - - yield self.http_client.fetch( - self.get_url('/ops/move'), - method='POST', - body=json.dumps(self.payload()) - ) - - assert self.source_provider.move.called - self.source_provider.move.assert_called_once_with( - self.destination_provider, - WaterButlerPath(self.payload()['source']['path']), - WaterButlerPath(self.payload()['destination']['path']), - rename=None, - conflict='replace' - ) - - @testing.gen_test - def test_conflict(self): - self.source_provider.move = utils.MockCoroutine( - return_value=(utils.MockFileMetadata(), True) - ) - - payload = self.payload() - payload['conflict'] = 'keep' - - resp = yield self.http_client.fetch( - self.get_url('/ops/move'), - method='POST', - body=json.dumps(payload) - ) - - assert resp.code == 201 - assert self.source_provider.move.called - self.source_provider.move.assert_called_once_with( - self.destination_provider, - WaterButlerPath(payload['source']['path']), - WaterButlerPath(payload['destination']['path']), - rename=None, - conflict='keep' - ) - - @testing.gen_test - def test_rename(self): - 
metadata = utils.MockFileMetadata() - self.source_provider.move = utils.MockCoroutine( - return_value=(metadata, False) - ) - - payload = self.payload() - payload['rename'] = 'MyCoolFileGuys' - - resp = yield self.http_client.fetch( - self.get_url('/ops/move'), - method='POST', - body=json.dumps(payload) - ) - - assert resp.code == 200 - assert json.loads(resp.body.decode()) == metadata.serialized() - assert self.source_provider.move.called - self.source_provider.move.assert_called_once_with( - self.destination_provider, - WaterButlerPath(payload['source']['path']), - WaterButlerPath(payload['destination']['path']), - rename='MyCoolFileGuys', - conflict='replace' - ) - - @testing.gen_test - def test_intra_makes_callback(self): - self.source_provider.move = utils.MockCoroutine( - return_value=(utils.MockFileMetadata(), False) - ) - - yield self.http_client.fetch( - self.get_url('/ops/move'), - method='POST', - body=json.dumps(self.payload()) - ) - - self.mock_send_hook.assert_called_once_with( - 'move', - utils.MockFileMetadata() - ) diff --git a/tests/server/api/v0/test_revisions.py b/tests/server/api/v0/test_revisions.py deleted file mode 100644 index 8ee6d4e22..000000000 --- a/tests/server/api/v0/test_revisions.py +++ /dev/null @@ -1,53 +0,0 @@ -import json -from unittest import mock - -from tornado import testing - -from tests import utils - - -class TestRevisionHandler(utils.HandlerTestCase): - - HOOK_PATH = 'waterbutler.server.api.v0.revisions.RevisionHandler._send_hook' - - @testing.gen_test - def test_get_coro(self): - expected = [ - utils.MockFileMetadata(), - utils.MockFolderMetadata() - ] - - self.mock_provider.revisions = utils.MockCoroutine(return_value=expected) - - resp = yield self.http_client.fetch( - self.get_url('/revisions?provider=queenhub&path=/brian.tiff'), - ) - - assert {'data': [m.serialized() for m in expected]} == json.loads(resp.body.decode()) - - @testing.gen_test - def test_get_not_coro(self): - expected = [ - utils.MockFileMetadata(), - utils.MockFolderMetadata() - ] - - self.mock_provider.revisions = mock.Mock(return_value=expected) - - resp = yield self.http_client.fetch( - self.get_url('/revisions?provider=queenhub&path=/brian.tiff'), - ) - - assert {'data': [m.serialized() for m in expected]} == json.loads(resp.body.decode()) - - @testing.gen_test - def test_get_empty(self): - expected = [] - - self.mock_provider.revisions = mock.Mock(return_value=expected) - - resp = yield self.http_client.fetch( - self.get_url('/revisions?provider=queenhub&path=/brian.tiff'), - ) - - assert {'data': [m.serialized() for m in expected]} == json.loads(resp.body.decode()) diff --git a/tests/server/api/v0/test_status.py b/tests/server/api/v0/test_status.py deleted file mode 100644 index 6fc5ef712..000000000 --- a/tests/server/api/v0/test_status.py +++ /dev/null @@ -1,22 +0,0 @@ -import json -from http import HTTPStatus - -from tornado import testing - -from tests import utils -from waterbutler.version import __version__ - - -class TestStatusHandler(utils.HandlerTestCase): - - @testing.gen_test - def test_get_coro(self): - expected = { - 'status': 'up', - 'version': __version__, - } - resp = yield self.http_client.fetch( - self.get_url('/status'), - ) - assert resp.code == HTTPStatus.OK - assert expected == json.loads(resp.body.decode()) diff --git a/tests/server/api/v0/test_zip.py b/tests/server/api/v0/test_zip.py deleted file mode 100644 index ab9e7008f..000000000 --- a/tests/server/api/v0/test_zip.py +++ /dev/null @@ -1,34 +0,0 @@ -import io -import zipfile - -from tornado 
import testing - -from waterbutler.core import streams -from waterbutler.core.utils import AsyncIterator - -from tests import utils - - -class TestZipHandler(utils.HandlerTestCase): - - HOOK_PATH = 'waterbutler.server.api.v0.zip.ZipHandler._send_hook' - - @testing.gen_test - def test_download_stream(self): - data = b'freddie brian john roger' - stream = streams.StringStream(data) - stream.content_type = 'application/octet-stream' - - zipstream = streams.ZipStreamReader(AsyncIterator([('file.txt', stream)])) - - self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream) - - resp = yield self.http_client.fetch( - self.get_url('/zip?provider=queenhub&path=/freddie.png'), - ) - - zip = zipfile.ZipFile(io.BytesIO(resp.body)) - - assert zip.testzip() is None - - assert zip.open('file.txt').read() == data diff --git a/tests/utils.py b/tests/utils.py index 42b467c75..0f41c8d1c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -154,89 +154,6 @@ def can_intra_copy(self, other, path=None): return self.__class__ == other.__class__ -class HandlerTestCase(testing.AsyncHTTPTestCase): - - def setUp(self): - policy = asyncio.get_event_loop_policy() - policy.get_event_loop().close() - self.event_loop = policy.new_event_loop() - policy.set_event_loop(self.event_loop) - - super().setUp() - - def get_identity(*args, **kwargs): - return copy.deepcopy({ - 'auth': {}, - 'credentials': {}, - 'settings': {}, - 'callback_url': 'example.com' - }) - - self.mock_identity = MockCoroutine(side_effect=get_identity) - - # self.mock_identity.return_value = identity_future - self.identity_patcher = mock.patch('waterbutler.server.api.v0.core.auth_handler.fetch', self.mock_identity) - - self.mock_provider = MockProvider1({}, {}, {}) - self.mock_make_provider = mock.Mock(return_value=self.mock_provider) - self.make_provider_patcher = mock.patch('waterbutler.core.utils.make_provider', self.mock_make_provider) - - if hasattr(self, 'HOOK_PATH'): - self.mock_send_hook = mock.Mock() - self.send_hook_patcher = mock.patch(self.HOOK_PATH, self.mock_send_hook) - self.send_hook_patcher.start() - - self.identity_patcher.start() - self.make_provider_patcher.start() - - def tearDown(self): - super().tearDown() - self.identity_patcher.stop() - if hasattr(self, 'HOOK_PATH'): - self.send_hook_patcher.stop() - self.make_provider_patcher.stop() - self.event_loop.close() - - def get_app(self): - return make_app(debug=False) - - def get_new_ioloop(self): - return AsyncIOMainLoop() - - -class MultiProviderHandlerTestCase(HandlerTestCase): - - def setUp(self): - super().setUp() - self.source_provider = MockProvider2({}, {}, {}) - self.destination_provider = MockProvider2({}, {}, {}) - - self.mock_make_provider.return_value = None - self.mock_make_provider.side_effect = [ - self.source_provider, - self.destination_provider - ] - - def tearDown(self): - super().tearDown() - - def payload(self): - return copy.deepcopy({ - 'source': { - 'nid': 'foo', - 'provider': 'source', - 'path': '/source/path', - 'callback_url': 'example.com' - }, - 'destination': { - 'nid': 'bar', - 'provider': 'destination', - 'path': '/destination/path', - 'callback_url': 'example.com' - } - }) - - class TempFilesContext: def __init__(self): self._dir = tempfile.mkdtemp() diff --git a/waterbutler/auth/osf/handler.py b/waterbutler/auth/osf/handler.py index 0d068f73e..2d68fac9b 100644 --- a/waterbutler/auth/osf/handler.py +++ b/waterbutler/auth/osf/handler.py @@ -66,30 +66,6 @@ async def make_request(self, params, headers, cookies): except (jwt.InvalidTokenError, 
KeyError): raise exceptions.AuthError(data, code=response.status) - async def fetch(self, request, bundle): - """Used for v0""" - headers = {'Content-Type': 'application/json'} - - if 'Authorization' in request.headers: - headers['Authorization'] = request.headers['Authorization'] - - cookie = request.query_arguments.get('cookie') - if cookie: - cookie = cookie[0].decode() - - view_only = request.query_arguments.get('view_only') - if view_only: - view_only = view_only[0].decode() - - payload = (await self.make_request( - self.build_payload(bundle, cookie=cookie, view_only=view_only), - headers, - dict(request.cookies) - )) - - payload['auth']['callback_url'] = payload['callback_url'] - return payload - async def get(self, resource, provider, request, action=None, auth_type=AuthType.SOURCE): """Used for v1""" method = request.method.lower() diff --git a/waterbutler/core/auth.py b/waterbutler/core/auth.py index fe89b65ec..01fe2cd4f 100644 --- a/waterbutler/core/auth.py +++ b/waterbutler/core/auth.py @@ -9,10 +9,6 @@ class AuthType(Enum): class BaseAuthHandler(metaclass=abc.ABCMeta): - @abc.abstractmethod - async def fetch(self, request, bundle): - pass - @abc.abstractmethod async def get(self, resource, provider, request, action=None, auth_type=AuthType.SOURCE): pass diff --git a/waterbutler/server/auth.py b/waterbutler/server/auth.py index df67940e1..31ae4c983 100644 --- a/waterbutler/server/auth.py +++ b/waterbutler/server/auth.py @@ -14,13 +14,6 @@ def __init__(self, names): name_order=True, ) - async def fetch(self, request, bundle): - for extension in self.manager.extensions: - credential = await extension.obj.fetch(request, bundle) - if credential: - return credential - raise AuthHandler('no valid credential found') - async def get(self, resource, provider, request, action=None, auth_type=AuthType.SOURCE): for extension in self.manager.extensions: credential = await extension.obj.get(resource, provider, request, action=action, auth_type=auth_type) From 1ce86b9fd606dc6777db648f102cb9b0c0a4c874 Mon Sep 17 00:00:00 2001 From: Josh Bird Date: Thu, 16 Aug 2018 13:57:35 -0400 Subject: [PATCH 5/6] Remove validate path, and rename validate_v1_path validate_path is only used by v0, so by removing v0, we can remove validate_path as well. 
validate_v1_path also gets renamed validate_path to cut down any confusion about the naming --- docs/adding-providers.rst | 1 - docs/api.rst | 9 - docs/overview.rst | 2 - tests/providers/bitbucket/fixtures.py | 4 +- tests/providers/bitbucket/test_provider.py | 44 ++-- tests/providers/box/fixtures.py | 2 +- tests/providers/box/test_provider.py | 42 +--- tests/providers/cloudfiles/test_provider.py | 6 +- tests/providers/dataverse/test_provider.py | 195 +++++++++++------- tests/providers/dropbox/fixtures.py | 2 +- tests/providers/dropbox/test_provider.py | 28 +-- tests/providers/figshare/test_provider.py | 48 ++--- tests/providers/filesystem/test_provider.py | 20 +- tests/providers/github/fixtures.py | 2 +- tests/providers/github/test_provider.py | 20 +- tests/providers/gitlab/test_provider.py | 34 +-- tests/providers/googlecloud/test_provider.py | 44 ++-- tests/providers/googledrive/fixtures.py | 2 +- tests/providers/googledrive/test_provider.py | 22 +- tests/providers/onedrive/fixtures.py | 4 +- tests/providers/onedrive/test_provider.py | 92 +++------ tests/providers/osfstorage/fixtures.py | 4 +- tests/providers/osfstorage/test_provider.py | 45 ++-- tests/providers/owncloud/test_provider.py | 31 +-- tests/providers/s3/test_provider.py | 22 +- tests/utils.py | 6 - waterbutler/core/metadata.py | 4 - waterbutler/core/provider.py | 35 +--- waterbutler/providers/bitbucket/provider.py | 33 +-- waterbutler/providers/box/provider.py | 66 +----- waterbutler/providers/cloudfiles/provider.py | 3 - waterbutler/providers/dataverse/provider.py | 9 +- waterbutler/providers/dropbox/provider.py | 7 +- waterbutler/providers/figshare/provider.py | 126 +---------- waterbutler/providers/filesystem/provider.py | 5 +- waterbutler/providers/github/provider.py | 31 +-- waterbutler/providers/gitlab/provider.py | 57 +++-- waterbutler/providers/googlecloud/provider.py | 3 - waterbutler/providers/googledrive/provider.py | 10 +- waterbutler/providers/onedrive/provider.py | 67 +----- waterbutler/providers/osfstorage/provider.py | 41 +--- waterbutler/providers/owncloud/provider.py | 41 +--- waterbutler/providers/s3/provider.py | 7 +- .../server/api/v1/provider/__init__.py | 2 +- .../server/api/v1/provider/movecopy.py | 2 +- 45 files changed, 397 insertions(+), 883 deletions(-) diff --git a/docs/adding-providers.rst b/docs/adding-providers.rst index 20a5b1422..34b799510 100644 --- a/docs/adding-providers.rst +++ b/docs/adding-providers.rst @@ -4,7 +4,6 @@ Adding A New Provider The job of the provider is to translate our common RESTful API into actions against the external provider. The WaterButler API v1 handler (waterbutler.server.api.v1.provider) accepts the incoming requests, builds the appropriate provider object, does some basic validation on the inputs, then passes the request data off to the provider action method. A new provider will inherit from `waterbutler.core.provider.BaseProvider` and implement some or all of the following methods:: validate_path() abstract - validate_v1_path() abstract download() abstract metadata() abstract upload() abstract diff --git a/docs/api.rst b/docs/api.rst index 1072d12bf..87af9b11f 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -1,15 +1,6 @@ API === -v0 API ------- - -.. warning:: - - The v0 WaterButler API is deprecated and should no longer be used. It is only documented to provide a reference for legacy consumers. 
- -TODO: v0 api docs - v1 API ------ diff --git a/docs/overview.rst b/docs/overview.rst index fa88b800e..29600b0d9 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -44,8 +44,6 @@ If the user is interacting with WaterButler via the OSF, the diagram looks like Only one auth provider so far, the OSF. -Two APIs, v0 and v1. v0 is deprecated. - Terminology ----------- diff --git a/tests/providers/bitbucket/fixtures.py b/tests/providers/bitbucket/fixtures.py index 11b180e03..8a785bbcf 100644 --- a/tests/providers/bitbucket/fixtures.py +++ b/tests/providers/bitbucket/fixtures.py @@ -283,11 +283,11 @@ def revision_metadata(): } -# fixtures for testing permutations of validate_v1_path & co. +# Fixtures for testing permutations of path validation. with open(os.path.join(os.path.dirname(__file__), 'fixtures/validate_path.json'), 'r') as fp: validate_path = json.load(fp) -# fixtures for testing file revision metadata +# Fixtures for testing file revision metadata with open(os.path.join(os.path.dirname(__file__), 'fixtures/revisions.json'), 'r') as fp: revisions = json.load(fp) diff --git a/tests/providers/bitbucket/test_provider.py b/tests/providers/bitbucket/test_provider.py index ea9ad8248..b0994208a 100644 --- a/tests/providers/bitbucket/test_provider.py +++ b/tests/providers/bitbucket/test_provider.py @@ -45,7 +45,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, provider): + async def test_validate_path_root(self, provider): test_fixtures = fixtures.validate_path default_branch_body = test_fixtures['default_branch'] @@ -53,15 +53,12 @@ async def test_validate_v1_path_root(self, provider): aiohttpretty.register_json_uri('GET', default_branch_url, body=default_branch_body) try: - wb_path_v1 = await provider.validate_v1_path('/') + wb_path = await provider.validate_path('/') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await provider.validate_path('/') - - assert wb_path_v1 == wb_path_v0 - assert wb_path_v1.branch_name == default_branch_body['name'] - assert wb_path_v1.commit_sha == None + assert wb_path.branch_name == default_branch_body['name'] + assert wb_path.commit_sha == None @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -69,7 +66,7 @@ async def test_validate_v1_path_root(self, provider): ('/foo-file.txt', 'file'), ('/foo-dir/', 'folder'), ]) - async def test_validate_v1_path(self, provider, path, kind): + async def test_validate_path(self, provider, path, kind): test_fixtures = fixtures.validate_path default_branch_body = test_fixtures['default_branch'] @@ -82,19 +79,16 @@ async def test_validate_v1_path(self, provider, path, kind): aiohttpretty.register_json_uri('GET', dir_listing_url, body=dir_listing_body) try: - wb_path_v1 = await provider.validate_v1_path(path) + wb_path = await provider.validate_path(path) except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await provider.validate_path(path) - - assert wb_path_v1 == wb_path_v0 - assert wb_path_v1.branch_name == default_branch + assert wb_path.branch_name == default_branch # TODO: assert commitSha bad_path = path.rstrip('/') if kind == 'folder' else path + '/' with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path(bad_path) + await provider.validate_path(bad_path) @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -104,7 +98,7 @@ async def test_validate_v1_path(self, provider, path, kind): ('revision', 'bleep-blorp', 'branch_name'), ('revision', '345def023ab29', 'commit_sha'), ]) - async def 
test_validate_v1_path_commit_sha(self, provider, arg_name, arg_val, attr_name): + async def test_validate_path_commit_sha(self, provider, arg_name, arg_val, attr_name): test_fixtures = fixtures.validate_path dir_listing_body = test_fixtures['root_dir_listing'] @@ -115,7 +109,7 @@ async def test_validate_v1_path_commit_sha(self, provider, arg_name, arg_val, at path = '/foo-file.txt' kwargs = {arg_name: arg_val} try: - wb_path_v1 = await provider.validate_v1_path(path, **kwargs) + wb_path = await provider.validate_path(path, **kwargs) except Exception as exc: pytest.fail(str(exc)) @@ -130,35 +124,29 @@ async def test_validate_v1_path_commit_sha(self, provider, arg_name, arg_val, at commit_sha = ref_val branch_name = None if attr_name == 'commit_sha' else arg_val - assert getattr(wb_path_v1, attr_name) == arg_val - assert wb_path_v1.ref == ref_val - assert wb_path_v1.extra == { + assert getattr(wb_path, attr_name) == arg_val + assert wb_path.ref == ref_val + assert wb_path.extra == { 'commitSha': commit_sha, 'branchName': branch_name, } - wb_path_v0 = await provider.validate_path(path, **kwargs) - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_subfolder(self, provider): + async def test_validate_path_subfolder(self, provider): test_fixtures = fixtures.validate_path dir_listing_body = test_fixtures['subfolder_dir_listing'] base_commit = dir_listing_body['node'] - dir_listing_url = provider._build_v1_repo_url('src', 'main-branch', 'subfolder') + '/' + dir_listing_url = provider._build_repo_url('src', 'main-branch', 'subfolder') + '/' aiohttpretty.register_json_uri('GET', dir_listing_url, body=dir_listing_body) path = '/subfolder/.gitkeep' try: - wb_path_v1 = await provider.validate_v1_path(path, branch='main-branch') + wb_path = await provider.validate_path(path, branch='main-branch') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await provider.validate_path(path, branch='main-branch') - assert wb_path_v1 == wb_path_v0 - class TestRevisions: diff --git a/tests/providers/box/fixtures.py b/tests/providers/box/fixtures.py index d8cd2c443..035774e64 100644 --- a/tests/providers/box/fixtures.py +++ b/tests/providers/box/fixtures.py @@ -5,7 +5,7 @@ @pytest.fixture def root_provider_fixtures(): - # fixtures for testing validate_v1_path for root provider + # fixtures for testing validate_path for root provider with open(os.path.join(os.path.dirname(__file__), 'fixtures/root_provider.json'), 'r') as fp: return json.load(fp) diff --git a/tests/providers/box/test_provider.py b/tests/providers/box/test_provider.py index cb773b281..8227de39c 100644 --- a/tests/providers/box/test_provider.py +++ b/tests/providers/box/test_provider.py @@ -78,7 +78,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider, root_provider_fixtures): + async def test_validate_path_file(self, provider, root_provider_fixtures): file_id = '5000948880' good_url = provider.build_url('files', file_id, fields='id,name,path_collection') @@ -90,22 +90,18 @@ async def test_validate_v1_path_file(self, provider, root_provider_fixtures): aiohttpretty.register_uri('get', bad_url, status=404) try: - wb_path_v1 = await provider.validate_v1_path('/' + file_id) + wb_path = await provider.validate_path('/' + file_id) except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + file_id + '/') + await 
provider.validate_path('/' + file_id + '/') assert exc.value.code == HTTPStatus.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + file_id) - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider, root_provider_fixtures): + async def test_validate_path_folder(self, provider, root_provider_fixtures): provider.folder = '0' folder_id = '11446498' @@ -117,19 +113,15 @@ async def test_validate_v1_path_folder(self, provider, root_provider_fixtures): status=200) aiohttpretty.register_uri('get', bad_url, status=404) try: - wb_path_v1 = await provider.validate_v1_path('/' + folder_id + '/') + wb_path = await provider.validate_path('/' + folder_id + '/') except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + folder_id) + await provider.validate_path('/' + folder_id) assert exc.value.code == HTTPStatus.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + folder_id + '/') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_validate_path_root(self, provider): @@ -140,32 +132,14 @@ async def test_validate_path_root(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, provider): - path = await provider.validate_v1_path('/') - assert path.is_dir - assert len(path.parts) == 1 - assert path.name == '' - - @pytest.mark.asyncio - @pytest.mark.aiohttpretty - async def test_validate_v1_path_bad_path(self, provider): + async def test_validate_path_bad_path(self, provider): with pytest.raises(exceptions.NotFoundError) as e: - await provider.validate_v1_path('/bulbasaur') + await provider.validate_path('/bulbasaur') assert e.value.message == 'Could not retrieve file or directory /bulbasaur' assert e.value.code == 404 - @pytest.mark.asyncio - @pytest.mark.aiohttpretty - async def test_validate_path_bad_path(self, provider): - - with pytest.raises(exceptions.MetadataError) as e: - await provider.validate_path('/bulbasaur/charmander') - - assert e.value.message == 'Could not find /bulbasaur/charmander' - assert e.value.code == 404 - @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_validate_path(self, provider, root_provider_fixtures): diff --git a/tests/providers/cloudfiles/test_provider.py b/tests/providers/cloudfiles/test_provider.py index da8bb55a5..746d09f01 100644 --- a/tests/providers/cloudfiles/test_provider.py +++ b/tests/providers/cloudfiles/test_provider.py @@ -651,13 +651,13 @@ async def test_metadata_file_bad_content_type(self, connected_provider, file_met await connected_provider.metadata(path) -class TestV1ValidatePath: +class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_v1_validate_path(self, connected_provider): + async def test_validate_path(self, connected_provider): path = '/ab4x3' - result = await connected_provider.validate_v1_path(path) + result = await connected_provider.validate_path(path) assert result.path == path.strip('/') diff --git a/tests/providers/dataverse/test_provider.py b/tests/providers/dataverse/test_provider.py index 141abe45c..3b11edf61 100644 --- a/tests/providers/dataverse/test_provider.py +++ b/tests/providers/dataverse/test_provider.py @@ -48,70 +48,81 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider, native_dataset_metadata): - draft_url = 
provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), - key=provider.token) - published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, - 'latest-published'), - key=provider.token) + async def test_validate_path_file(self, provider, native_dataset_metadata): + """A file id validates without a trailing slash; a trailing slash raises NotFoundError. + """ + draft_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest'), + key=provider.token + ) + published_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), + key=provider.token + ) - aiohttpretty.register_json_uri('GET', - draft_url, - status=200, - body=native_dataset_metadata) - aiohttpretty.register_json_uri('GET', - published_url, - status=200, - body=native_dataset_metadata) + aiohttpretty.register_json_uri( + 'GET', + draft_url, + status=200, + body=native_dataset_metadata + ) + aiohttpretty.register_json_uri( + 'GET', + published_url, + status=200, + body=native_dataset_metadata + ) path = '/21' try: - wb_path_v1 = await provider.validate_v1_path(path) + wb_path = await provider.validate_path(path) except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path(path + '/') + await provider.validate_path(path + '/') assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path(path) - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider): + async def test_validate_path_folder(self, provider): + """The dataset root '/' validates as a folder. + """ try: - wb_path_v1 = await provider.validate_v1_path('/') + wb_path = await provider.validate_path('/') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await provider.validate_path('/') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_revalidate_path(self, provider, native_dataset_metadata): - draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), - key=provider.token) - published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, - 'latest-published'), - key=provider.token) - - aiohttpretty.register_json_uri('GET', - draft_url, - status=200, - body=native_dataset_metadata) - aiohttpretty.register_json_uri('GET', - published_url, - status=200, - body=native_dataset_metadata) + """Revalidating a file name against the dataset root yields the expected child path. + """ + draft_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest'), + key=provider.token + ) + published_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), + key=provider.token + ) + aiohttpretty.register_json_uri( + 'GET', + draft_url, + status=200, + body=native_dataset_metadata + ) + aiohttpretty.register_json_uri( + 'GET', + published_url, + status=200, + body=native_dataset_metadata + ) - base = await provider.validate_v1_path('/') + base = await provider.validate_path('/') wb_path = await provider.revalidate_path(base, '/thefile.txt') assert wb_path.name == 'thefile.txt' @@ -125,23 +136,32 @@ class TestCRUD: @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_download(self, provider, native_dataset_metadata): + """Downloading a known file id returns its contents. + """ path = '/21' url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token) - draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), - key=provider.token) - published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, - 'latest-published'), - key=provider.token) + draft_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id,
'latest'), + key=provider.token + ) + published_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), + key=provider.token + ) aiohttpretty.register_uri('GET', url, body=b'better', auto_length=True) - aiohttpretty.register_json_uri('GET', - draft_url, - status=200, - body=native_dataset_metadata) - aiohttpretty.register_json_uri('GET', - published_url, - status=200, - body=native_dataset_metadata) + aiohttpretty.register_json_uri( + 'GET', + draft_url, + status=200, + body=native_dataset_metadata + ) + aiohttpretty.register_json_uri( + 'GET', + published_url, + status=200, + body=native_dataset_metadata + ) path = await provider.validate_path(path) @@ -153,19 +173,31 @@ async def test_download(self, provider, native_dataset_metadata): @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_download_not_found(self, provider, native_dataset_metadata): + """Download of a file id whose download URL returns a 404. + """ path = '/21' url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token) aiohttpretty.register_uri('GET', url, status=404) - draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), - key=provider.token) - aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) - published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, - 'latest-published'), - key=provider.token) - aiohttpretty.register_json_uri('GET', - published_url, - status=200, - body=native_dataset_metadata) + draft_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest'), + key=provider.token + ) + aiohttpretty.register_json_uri( + 'GET', + draft_url, + status=200, + body=native_dataset_metadata + ) + published_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), + key=provider.token + ) + aiohttpretty.register_json_uri( + 'GET', + published_url, + status=200, + body=native_dataset_metadata + ) path = await provider.validate_path(path) @@ -175,18 +207,29 @@ async def test_download_not_found(self, provider, native_dataset_metadata): @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_download_invalid_path(self, provider, native_dataset_metadata): + """Download of a file id that is not present in the dataset metadata. + """ path = '/50' - draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, - 'latest'), - key=provider.token) - aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) - published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, - 'latest-published'), - key=provider.token) - aiohttpretty.register_json_uri('GET', - published_url, - status=200, - body=native_dataset_metadata) + draft_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest'), + key=provider.token + ) + aiohttpretty.register_json_uri( + 'GET', + draft_url, + status=200, + body=native_dataset_metadata + ) + published_url = provider.build_url( + dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), + key=provider.token + ) + aiohttpretty.register_json_uri( + 'GET', + published_url, + status=200, + body=native_dataset_metadata + ) path = await provider.validate_path(path) @@ -197,6 +240,8 @@ async def test_download_invalid_path(self, provider, native_dataset_metadata): @pytest.mark.aiohttpretty async def test_upload_create(self, provider, file_stream, native_file_metadata, empty_native_dataset_metadata, native_dataset_metadata): + """Uploading a file that does not yet exist in the dataset. + """ path = '/thefile.txt' url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi) aiohttpretty.register_uri('POST', url, status=201) diff
--git a/tests/providers/dropbox/fixtures.py b/tests/providers/dropbox/fixtures.py index 78d3ad940..6fffd8709 100644 --- a/tests/providers/dropbox/fixtures.py +++ b/tests/providers/dropbox/fixtures.py @@ -30,7 +30,7 @@ def settings(): @pytest.fixture def provider_fixtures(): - # fixtures for testing validate_v1_path for root provider + # Fixtures for testing validate_path for root provider with open(os.path.join(os.path.dirname(__file__), 'fixtures/root_provider.json'), 'r') as fp: return json.load(fp) diff --git a/tests/providers/dropbox/test_provider.py b/tests/providers/dropbox/test_provider.py index 2a2335b0c..81de31dd3 100644 --- a/tests/providers/dropbox/test_provider.py +++ b/tests/providers/dropbox/test_provider.py @@ -39,7 +39,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty @pytest.mark.parametrize('settings', [{'folder': '/'}]) - async def test_validate_v1_path_file(self, provider, provider_fixtures): + async def test_validate_path_file(self, provider, provider_fixtures): file_path = '/Photos/Getting_Started.pdf' data = {"path": file_path} metadata_url = provider.build_url('files', 'get_metadata') @@ -51,24 +51,20 @@ async def test_validate_v1_path_file(self, provider, provider_fixtures): ) try: - wb_path_v1 = await provider.validate_v1_path(file_path) + wb_path = await provider.validate_path(file_path) except Exception as exc: pytest.fail(str(exc)) - wb_path_v1 = None + wb_path = None with pytest.raises(core_exceptions.NotFoundError) as exc: - await provider.validate_v1_path(file_path + '/') + await provider.validate_path(file_path + '/') assert exc.value.code == HTTPStatus.NOT_FOUND - wb_path_v0 = await provider.validate_path(file_path) - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty @pytest.mark.parametrize('settings', [{'folder': '/'}]) - async def test_validate_v1_path_folder(self, provider, provider_fixtures): + async def test_validate_path_folder(self, provider, provider_fixtures): folder_path = '/Photos' data = {"path": folder_path} metadata_url = provider.build_url('files', 'get_metadata') @@ -80,20 +76,16 @@ async def test_validate_v1_path_folder(self, provider, provider_fixtures): ) try: - wb_path_v1 = await provider.validate_v1_path(folder_path + '/') + wb_path = await provider.validate_path(folder_path + '/') except Exception as exc: pytest.fail(str(exc)) - wb_path_v1 = None + wb_path = None with pytest.raises(core_exceptions.NotFoundError) as exc: - await provider.validate_v1_path(folder_path) + await provider.validate_path(folder_path) assert exc.value.code == HTTPStatus.NOT_FOUND - wb_path_v0 = await provider.validate_path(folder_path + '/') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio async def test_returns_path_obj(self, provider): path = await provider.validate_path('/thisisapath') @@ -113,8 +105,8 @@ async def test_with_folder(self, provider): assert provider.folder in path.full_path @pytest.mark.asyncio - async def test_validate_v1_path_base(self, provider): - path = await provider.validate_v1_path('/') + async def test_validate_path_base(self, provider): + path = await provider.validate_path('/') assert path.is_dir assert len(path.parts) == 1 diff --git a/tests/providers/figshare/test_provider.py b/tests/providers/figshare/test_provider.py index cdcc38aa5..210a58bc8 100644 --- a/tests/providers/figshare/test_provider.py +++ b/tests/providers/figshare/test_provider.py @@ -86,11 +86,11 @@ async def test_article_provider(self, article_settings, article_provider): assert 
article_provider.article_id == article_settings['container_id'] -class TestProjectV1ValidatePath: +class TestProjectValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder_article(self, project_provider, root_provider_fixtures): + async def test_validate_path_folder_article(self, project_provider, root_provider_fixtures): item = root_provider_fixtures['folder_article_metadata'] file_id = str(item['id']) path = '/{}/'.format(file_id) @@ -106,7 +106,7 @@ async def test_validate_v1_path_folder_article(self, project_provider, root_prov params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)}) aiohttpretty.register_json_uri('GET', article_url, body=item) - result = await project_provider.validate_v1_path(path) + result = await project_provider.validate_path(path) expected = FigsharePath('/{}/'.format(item['title']), _ids=(project_provider.container_id, file_id), folder=True, @@ -116,7 +116,7 @@ async def test_validate_v1_path_folder_article(self, project_provider, root_prov @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder_article_bad_path(self, project_provider, + async def test_validate_path_folder_article_bad_path(self, project_provider, root_provider_fixtures): item = root_provider_fixtures['folder_article_metadata'] file_id = str(item['id']) @@ -134,14 +134,14 @@ async def test_validate_v1_path_folder_article_bad_path(self, project_provider, aiohttpretty.register_json_uri('GET', article_url, body=item) with pytest.raises(exceptions.NotFoundError) as e: - await project_provider.validate_v1_path(path) + await project_provider.validate_path(path) assert e.value.code == 404 assert aiohttpretty.has_call(method='GET', uri=article_url) @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder_article_bad_type(self, project_provider, + async def test_validate_path_folder_article_bad_type(self, project_provider, root_provider_fixtures): item = root_provider_fixtures['folder_article_metadata'] file_id = str(item['id']) @@ -162,42 +162,42 @@ async def test_validate_v1_path_folder_article_bad_type(self, project_provider, aiohttpretty.register_json_uri('GET', article_url, body=item) with pytest.raises(exceptions.NotFoundError) as e: - await project_provider.validate_v1_path(path) + await project_provider.validate_path(path) assert e.value.code == 404 assert aiohttpretty.has_call(method='GET', uri=article_url) @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_project_validate_v1_path_root(self, project_provider): + async def test_project_validate_path_root(self, project_provider): path = '/' - result = await project_provider.validate_v1_path(path) + result = await project_provider.validate_path(path) expected = FigsharePath(path, _ids=('', ), folder=True, is_public=False) assert result == expected @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_article_validate_v1_path_invalid_path(self, article_provider): + async def test_article_validate_path_invalid_path(self, article_provider): with pytest.raises(exceptions.InvalidPathError) as e: - await article_provider.validate_v1_path('/this/is/an/invalid/path') + await article_provider.validate_path('/this/is/an/invalid/path') assert e.value.code == 400 @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_invalid_path(self, project_provider): + async def test_validate_path_invalid_path(self, project_provider): path = 'whatever' with pytest.raises(exceptions.InvalidPathError) as e: - await 
project_provider.validate_v1_path(path) + await project_provider.validate_path(path) assert e.value.code == 400 @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file_article(self, project_provider, root_provider_fixtures): + async def test_validate_path_file_article(self, project_provider, root_provider_fixtures): file_item = root_provider_fixtures['file_metadata'] item = root_provider_fixtures['file_article_metadata'] file_id = str(item['files'][0]['id']) @@ -217,7 +217,7 @@ async def test_validate_v1_path_file_article(self, project_provider, root_provid aiohttpretty.register_json_uri('GET', article_url, body=file_item) - result = await project_provider.validate_v1_path(path) + result = await project_provider.validate_path(path) expected = FigsharePath('/{}/{}'.format(item['title'], file_item['name']), _ids=(project_provider.container_id, file_id), folder=False, is_public=False) @@ -226,7 +226,7 @@ async def test_validate_v1_path_file_article(self, project_provider, root_provid @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file_article_public(self, project_provider, + async def test_validate_path_file_article_public(self, project_provider, root_provider_fixtures): file_item = root_provider_fixtures['file_metadata_public'] item = root_provider_fixtures['file_article_metadata'] @@ -247,7 +247,7 @@ async def test_validate_v1_path_file_article_public(self, project_provider, aiohttpretty.register_json_uri('GET', article_url, body=file_item) - result = await project_provider.validate_v1_path(path) + result = await project_provider.validate_path(path) expected = FigsharePath('/{}/{}'.format(item['title'], file_item['name']), _ids=(project_provider.container_id, file_id), folder=False, is_public=False) @@ -256,7 +256,7 @@ async def test_validate_v1_path_file_article_public(self, project_provider, @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file_article_bad_path(self, project_provider, + async def test_validate_path_file_article_bad_path(self, project_provider, root_provider_fixtures): file_item = root_provider_fixtures['file_metadata'] item = root_provider_fixtures['file_article_metadata'] @@ -278,26 +278,26 @@ async def test_validate_v1_path_file_article_bad_path(self, project_provider, aiohttpretty.register_json_uri('GET', article_url, body=file_item) with pytest.raises(exceptions.NotFoundError) as e: - await project_provider.validate_v1_path(path) + await project_provider.validate_path(path) assert e.value.code == 404 -class TestArticleV1ValidatePath: +class TestArticleValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_article_validate_v1_path_root(self, article_provider): + async def test_article_validate_path_root(self, article_provider): path = '/' - result = await article_provider.validate_v1_path(path) + result = await article_provider.validate_path(path) expected = FigsharePath(path, _ids=('', ), folder=True, is_public=False) assert result == expected @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_article_validate_v1_path(self, article_provider, root_provider_fixtures): + async def test_article_validate_path(self, article_provider, root_provider_fixtures): item = root_provider_fixtures['file_metadata'] file_id = item['id'] path = '/' + str(file_id) @@ -306,7 +306,7 @@ async def test_article_validate_v1_path(self, article_provider, root_provider_fi aiohttpretty.register_json_uri('GET', url, body=item) - result = await 
article_provider.validate_v1_path(path) + result = await article_provider.validate_path(path) expected = FigsharePath('/' + item['name'], _ids=('', file_id), folder=False, is_public=False) diff --git a/tests/providers/filesystem/test_provider.py b/tests/providers/filesystem/test_provider.py index 132358a5d..aa5fbc753 100644 --- a/tests/providers/filesystem/test_provider.py +++ b/tests/providers/filesystem/test_provider.py @@ -52,37 +52,29 @@ def setup_filesystem(provider): class TestValidatePath: @pytest.mark.asyncio - async def test_validate_v1_path_file(self, provider): + async def test_validate_path_file(self, provider): try: - wb_path_v1 = await provider.validate_v1_path('/flower.jpg') + wb_path = await provider.validate_path('/flower.jpg') except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/flower.jpg/') + await provider.validate_path('/flower.jpg/') assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/flower.jpg') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio - async def test_validate_v1_path_folder(self, provider): + async def test_validate_path_folder(self, provider): try: - wb_path_v1 = await provider.validate_v1_path('/subfolder/') + wb_path = await provider.validate_path('/subfolder/') except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/subfolder') + await provider.validate_path('/subfolder') assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/subfolder/') - - assert wb_path_v1 == wb_path_v0 - class TestCRUD: diff --git a/tests/providers/github/fixtures.py b/tests/providers/github/fixtures.py index d3159a877..d8f80ca62 100644 --- a/tests/providers/github/fixtures.py +++ b/tests/providers/github/fixtures.py @@ -6,7 +6,7 @@ @pytest.fixture def provider_fixtures(): - # fixtures for testing validate_v1_path for root provider + # Fixtures for testing validate_path for root provider with open(os.path.join(os.path.dirname(__file__), 'fixtures/root_provider.json'), 'r') as fp: return json.load(fp) diff --git a/tests/providers/github/test_provider.py b/tests/providers/github/test_provider.py index b6688495d..3dd3b21fa 100644 --- a/tests/providers/github/test_provider.py +++ b/tests/providers/github/test_provider.py @@ -120,7 +120,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider, provider_fixtures): + async def test_validate_path_file(self, provider, provider_fixtures): branch_url = provider.build_repo_url('branches', provider.default_branch) tree_url = provider.build_repo_url( 'git', 'trees', @@ -135,10 +135,10 @@ async def test_validate_v1_path_file(self, provider, provider_fixtures): blob_path = 'file.txt' - result = await provider.validate_v1_path('/' + blob_path) + result = await provider.validate_path('/' + blob_path) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + blob_path + '/') + await provider.validate_path('/' + blob_path + '/') expected = GitHubPath('/' + blob_path, _ids=[(provider.default_branch, '')]) @@ -147,11 +147,11 @@ async def test_validate_v1_path_file(self, provider, provider_fixtures): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, provider): + async def test_validate_path_root(self, provider): path = '/' - result = await 
provider.validate_v1_path(path, branch=provider.default_branch) - no_branch_result = await provider.validate_v1_path(path) + result = await provider.validate_path(path, branch=provider.default_branch) + no_branch_result = await provider.validate_path(path) expected = GitHubPath(path, _ids=[(provider.default_branch, '')]) @@ -160,7 +160,7 @@ async def test_validate_v1_path_root(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider, provider_fixtures): + async def test_validate_path_folder(self, provider, provider_fixtures): branch_url = provider.build_repo_url('branches', provider.default_branch) tree_url = provider.build_repo_url( 'git', 'trees', @@ -182,10 +182,10 @@ async def test_validate_v1_path_folder(self, provider, provider_fixtures): (provider.default_branch, None)] ) - result = await provider.validate_v1_path('/' + tree_path + '/') + result = await provider.validate_path('/' + tree_path + '/') with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + tree_path) + await provider.validate_path('/' + tree_path) assert exc.value.code == client.NOT_FOUND assert result == expected @@ -195,7 +195,7 @@ async def test_validate_v1_path_folder(self, provider, provider_fixtures): async def test_reject_multiargs(self, provider): with pytest.raises(exceptions.InvalidParameters) as exc: - await provider.validate_v1_path('/foo', ref=['bar', 'baz']) + await provider.validate_path('/foo', ref=['bar', 'baz']) assert exc.value.code == client.BAD_REQUEST diff --git a/tests/providers/gitlab/test_provider.py b/tests/providers/gitlab/test_provider.py index 8e4c20781..4b4a85bbd 100644 --- a/tests/providers/gitlab/test_provider.py +++ b/tests/providers/gitlab/test_provider.py @@ -72,7 +72,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_root(self, provider): + async def test_validate_root(self, provider): path = '/' default_branch_url = 'http://base.url/api/v4/projects/123' body = fixtures.default_branches['default_branch'] @@ -82,7 +82,7 @@ async def test_validate_v1_root(self, provider): commit_sha_body = fixtures.default_branches['get_commit_sha'] aiohttpretty.register_json_uri('GET', commit_sha_url, body=commit_sha_body, status=200) - root_path = await provider.validate_v1_path(path) + root_path = await provider.validate_path(path) assert root_path.is_dir assert root_path.is_root @@ -95,12 +95,12 @@ async def test_validate_v1_root(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_root_by_branch(self, provider): + async def test_validate_root_by_branch(self, provider): commit_sha_url = 'http://base.url/api/v4/projects/123/repository/branches/otherbranch' commit_sha_body = fixtures.default_branches['get_commit_sha'] aiohttpretty.register_json_uri('GET', commit_sha_url, body=commit_sha_body, status=200) - root_path = await provider.validate_v1_path('/', branch='otherbranch') + root_path = await provider.validate_path('/', branch='otherbranch') assert root_path.is_dir assert root_path.is_root @@ -113,9 +113,9 @@ async def test_validate_v1_root_by_branch(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_root_by_commit_sha(self, provider): + async def test_validate_root_by_commit_sha(self, provider): path = '/' - root_path = await provider.validate_v1_path(path, commitSha='a1b2c3d4') + root_path = await provider.validate_path(path, commitSha='a1b2c3d4') assert 
root_path.is_dir assert root_path.is_root @@ -128,9 +128,9 @@ async def test_validate_v1_root_by_commit_sha(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_root_by_revision_sha(self, provider): + async def test_validate_root_by_revision_sha(self, provider): path = '/' - root_path = await provider.validate_v1_path(path, revision='a1b2c3d4') + root_path = await provider.validate_path(path, revision='a1b2c3d4') assert root_path.is_dir assert root_path.is_root @@ -143,12 +143,12 @@ async def test_validate_v1_root_by_revision_sha(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_root_by_revision_branch(self, provider): + async def test_validate_root_by_revision_branch(self, provider): commit_sha_url = 'http://base.url/api/v4/projects/123/repository/branches/otherbranch' commit_sha_body = fixtures.default_branches['get_commit_sha'] aiohttpretty.register_json_uri('GET', commit_sha_url, body=commit_sha_body, status=200) - root_path = await provider.validate_v1_path('/', revision='otherbranch') + root_path = await provider.validate_path('/', revision='otherbranch') assert root_path.is_dir assert root_path.is_root @@ -161,14 +161,14 @@ async def test_validate_v1_root_by_revision_branch(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider): + async def test_validate_path_file(self, provider): path = '/folder1/file1' url = ('http://base.url/api/v4/projects/123/repository/tree' '?path=folder1/&page=1&per_page={}&ref=a1b2c3d4'.format(provider.MAX_PAGE_SIZE)) aiohttpretty.register_json_uri('GET', url, body=fixtures.simple_tree()) try: - file_path = await provider.validate_v1_path(path, commitSha='a1b2c3d4', + file_path = await provider.validate_path(path, commitSha='a1b2c3d4', branch='master') except Exception as exc: pytest.fail(str(exc)) @@ -199,18 +199,18 @@ async def test_validate_v1_path_file(self, provider): } with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path(path + '/', commitSha='a1b2c3d4') + await provider.validate_path(path + '/', commitSha='a1b2c3d4') @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider): + async def test_validate_path_folder(self, provider): path = '/files/lfs/' url = ('http://base.url/api/v4/projects/123/repository/tree' '?path=files/&page=1&per_page={}&ref=a1b2c3d4'.format(provider.MAX_PAGE_SIZE)) aiohttpretty.register_json_uri('GET', url, body=fixtures.subfolder_tree()) try: - folder_path = await provider.validate_v1_path(path, commitSha='a1b2c3d4', + folder_path = await provider.validate_path(path, commitSha='a1b2c3d4', branch='master') except Exception as exc: pytest.fail(str(exc)) @@ -237,7 +237,7 @@ async def test_no_such_repository(self, provider): aiohttpretty.register_json_uri('GET', default_branch_url, body={}, status=404) with pytest.raises(exceptions.NotFoundError) as exc: - root_path = await provider.validate_v1_path(path) + root_path = await provider.validate_path(path) assert exc.value.code == 404 @pytest.mark.asyncio @@ -249,7 +249,7 @@ async def test_uninitialized_repository(self, provider): aiohttpretty.register_json_uri('GET', default_branch_url, body={"default_branch": None}) with pytest.raises(exceptions.UninitializedRepositoryError) as exc: - root_path = await provider.validate_v1_path(path) + root_path = await provider.validate_path(path) assert exc.value.code == 400 diff --git 
a/tests/providers/googlecloud/test_provider.py b/tests/providers/googlecloud/test_provider.py index f138233a3..62dd031ed 100644 --- a/tests/providers/googlecloud/test_provider.py +++ b/tests/providers/googlecloud/test_provider.py @@ -12,24 +12,28 @@ from waterbutler.providers.googlecloud.metadata import GoogleCloudFileMetadata from waterbutler.providers.googlecloud import utils, settings, GoogleCloudProvider -from tests.providers.googlecloud.fixtures.providers import (mock_auth, - mock_auth_2, - mock_creds, - mock_creds_2, - mock_settings, - mock_settings_2) - -from tests.providers.googlecloud.fixtures.files import (file_raw, - file_name, - file_wb_path, - file_obj_name, - meta_file_raw, - meta_file_parsed, - meta_file_upload_raw, - meta_file_copy_raw, - file_2_wb_path, - file_2_obj_name, - file_2_copy_obj_name) +from tests.providers.googlecloud.fixtures.providers import ( + mock_auth, + mock_auth_2, + mock_creds, + mock_creds_2, + mock_settings, + mock_settings_2 +) + +from tests.providers.googlecloud.fixtures.files import ( + file_raw, + file_name, + file_wb_path, + file_obj_name, + meta_file_raw, + meta_file_parsed, + meta_file_upload_raw, + meta_file_copy_raw, + file_2_wb_path, + file_2_obj_name, + file_2_copy_obj_name +) from tests.providers.googlecloud.fixtures.folders import folder_wb_path, folder_obj_name @@ -73,14 +77,14 @@ async def test_provider_init(self, mock_provider): class TestValidatePath: @pytest.mark.asyncio - async def test_validate_v1_path_file(self, mock_provider, file_wb_path): + async def test_validate_path_file(self, mock_provider, file_wb_path): file_path = '/{}'.format(file_wb_path.path) assert file_path.startswith('/') and not file_path.endswith('/') wb_path = await mock_provider.validate_path(file_path) assert wb_path == file_wb_path @pytest.mark.asyncio - async def test_validate_v1_path_folder(self, mock_provider, folder_wb_path): + async def test_validate_path_folder(self, mock_provider, folder_wb_path): folder_path = '/{}'.format(folder_wb_path.path) assert folder_path.startswith('/') and folder_path.endswith('/') wb_path = await mock_provider.validate_path(folder_path) diff --git a/tests/providers/googledrive/fixtures.py b/tests/providers/googledrive/fixtures.py index 916718f65..d5511c0cf 100644 --- a/tests/providers/googledrive/fixtures.py +++ b/tests/providers/googledrive/fixtures.py @@ -5,7 +5,7 @@ @pytest.fixture def root_provider_fixtures(): - # fixtures for testing validate_v1_path for root provider + # fixtures for testing validate_path for root provider with open(os.path.join(os.path.dirname(__file__), 'fixtures/root_provider.json'), 'r') as fp: return json.load(fp) diff --git a/tests/providers/googledrive/test_provider.py b/tests/providers/googledrive/test_provider.py index 6c80a507e..b31d16ce4 100644 --- a/tests/providers/googledrive/test_provider.py +++ b/tests/providers/googledrive/test_provider.py @@ -203,7 +203,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider, search_for_file_response, + async def test_validate_path_file(self, provider, search_for_file_response, actual_file_response, no_folder_response): file_name = 'file.txt' file_id = '1234ideclarethumbwar' @@ -225,22 +225,19 @@ async def test_validate_v1_path_file(self, provider, search_for_file_response, aiohttpretty.register_json_uri('GET', specific_url, body=actual_file_response) try: - wb_path_v1 = await provider.validate_v1_path('/' + file_name) + wb_path = await provider.validate_path('/' + file_name) 
except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + file_name + '/') + await provider.validate_path('/' + file_name + '/') assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + file_name) - - assert wb_path_v1 == wb_path_v0 @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider, search_for_folder_response, + async def test_validate_path_folder(self, provider, search_for_folder_response, actual_folder_response, no_file_response): folder_name = 'foofolder' folder_id = 'whyis6afraidof7' @@ -262,25 +259,22 @@ async def test_validate_v1_path_folder(self, provider, search_for_folder_respons aiohttpretty.register_json_uri('GET', specific_url, body=actual_folder_response) try: - wb_path_v1 = await provider.validate_v1_path('/' + folder_name + '/') + wb_path = await provider.validate_path('/' + folder_name + '/') except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + folder_name) + await provider.validate_path('/' + folder_name) assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + folder_name + '/') - - assert wb_path_v1 == wb_path_v0 @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, provider): + async def test_validate_path_root(self, provider): path = '/' - result = await provider.validate_v1_path(path) + result = await provider.validate_path(path) expected = GoogleDrivePath('/', _ids=[provider.folder['id']], folder=True) assert result == expected diff --git a/tests/providers/onedrive/fixtures.py b/tests/providers/onedrive/fixtures.py index 8fd8b29b3..4e2b1d376 100644 --- a/tests/providers/onedrive/fixtures.py +++ b/tests/providers/onedrive/fixtures.py @@ -5,14 +5,14 @@ @pytest.fixture def root_provider_fixtures(): - # fixtures for testing validate_v1_path for root provider + # fixtures for testing validate_path for root provider with open(os.path.join(os.path.dirname(__file__), 'fixtures/root_provider.json'), 'r') as fp: return json.load(fp) @pytest.fixture def subfolder_provider_fixtures(): - # fixtures for testing validate_v1_path for subfolder provider + # fixtures for testing validate_path for subfolder provider with open(os.path.join(os.path.dirname(__file__), 'fixtures/subfolder_provider.json'), 'r') as fp: return json.load(fp) diff --git a/tests/providers/onedrive/test_provider.py b/tests/providers/onedrive/test_provider.py index 22e4519ae..3bb12dc9a 100644 --- a/tests/providers/onedrive/test_provider.py +++ b/tests/providers/onedrive/test_provider.py @@ -78,20 +78,17 @@ class TestRootProviderValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, root_provider): + async def test_validate_path_root(self, root_provider): try: - wb_path_v1 = await root_provider.validate_v1_path('/') + wb_path = await root_provider.validate_path('/') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await root_provider.validate_path('/') - - assert wb_path_v1 == wb_path_v0 - assert wb_path_v1.identifier == 'root' + assert wb_path.identifier == 'root' @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_validate_v1_path_file(self, root_provider, root_provider_fixtures): + async def test_validate_path_file(self, root_provider, root_provider_fixtures): file_id = root_provider_fixtures['file_id'] 
file_metadata = root_provider_fixtures['file_metadata'] @@ -100,25 +97,20 @@ async def test_validate_v1_path_file(self, root_provider, root_provider_fixtures file_path = '/{}'.format(file_id) try: - wb_path_v1 = await root_provider.validate_v1_path(file_path) + wb_path = await root_provider.validate_path(file_path) except Exception as exc: pytest.fail(str(exc)) file_name = '/{}'.format(file_metadata['name']) - assert str(wb_path_v1) == file_name - assert wb_path_v1.identifier == file_id - - wb_path_v0 = await root_provider.validate_path(file_path) - assert str(wb_path_v0) == file_name - - assert wb_path_v1 == wb_path_v0 + assert str(wb_path) == file_name + assert wb_path.identifier == file_id with pytest.raises(exceptions.NotFoundError) as exc: - await root_provider.validate_v1_path(file_path + '/') + await root_provider.validate_path(file_path + '/') @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_validate_v1_path_folder(self, root_provider, root_provider_fixtures): + async def test_validate_path_folder(self, root_provider, root_provider_fixtures): folder_id = root_provider_fixtures['folder_id'] folder_metadata = root_provider_fixtures['folder_metadata'] @@ -128,40 +120,32 @@ async def test_validate_v1_path_folder(self, root_provider, root_provider_fixtur folder_path = '/{}/'.format(folder_id) folder_name = '/{}/'.format(folder_metadata['name']) try: - wb_path_v1 = await root_provider.validate_v1_path(folder_path) + wb_path = await root_provider.validate_path(folder_path) except Exception as exc: pytest.fail(str(exc)) - assert str(wb_path_v1) == folder_name - assert wb_path_v1.identifier == folder_id - - wb_path_v0 = await root_provider.validate_path(folder_path) - assert str(wb_path_v0) == folder_name - - assert wb_path_v1 == wb_path_v0 + assert str(wb_path) == folder_name + assert wb_path.identifier == folder_id with pytest.raises(exceptions.NotFoundError) as exc: - await root_provider.validate_v1_path(folder_path.rstrip('/')) + await root_provider.validate_path(folder_path.rstrip('/')) class TestSubfolderProviderValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, subfolder_provider, subfolder_provider_fixtures): + async def test_validate_path_root(self, subfolder_provider, subfolder_provider_fixtures): try: - wb_path_v1 = await subfolder_provider.validate_v1_path('/') + wb_path = await subfolder_provider.validate_path('/') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await subfolder_provider.validate_path('/') - - assert wb_path_v1 == wb_path_v0 - assert wb_path_v1.identifier == subfolder_provider_fixtures['root_id'] + assert wb_path.identifier == subfolder_provider_fixtures['root_id'] @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_validate_v1_path_folder(self, subfolder_provider, subfolder_provider_fixtures): + async def test_validate_path_folder(self, subfolder_provider, subfolder_provider_fixtures): folder_id = subfolder_provider_fixtures['folder_id'] folder_metadata = subfolder_provider_fixtures['folder_metadata'] @@ -171,24 +155,19 @@ async def test_validate_v1_path_folder(self, subfolder_provider, subfolder_provi folder_path = '/{}/'.format(folder_id) folder_name = '/{}/'.format(folder_metadata['name']) try: - wb_path_v1 = await subfolder_provider.validate_v1_path(folder_path) + wb_path = await subfolder_provider.validate_path(folder_path) except Exception as exc: pytest.fail(str(exc)) - assert str(wb_path_v1) == folder_name - assert wb_path_v1.identifier == folder_id - - 
wb_path_v0 = await subfolder_provider.validate_path(folder_path) - assert str(wb_path_v0) == folder_name - - assert wb_path_v1 == wb_path_v0 + assert str(wb_path) == folder_name + assert wb_path.identifier == folder_id with pytest.raises(exceptions.NotFoundError) as exc: - await subfolder_provider.validate_v1_path(folder_path.rstrip('/')) + await subfolder_provider.validate_path(folder_path.rstrip('/')) @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_validate_v1_path_file_is_child(self, subfolder_provider, + async def test_validate_path_file_is_child(self, subfolder_provider, subfolder_provider_fixtures): """file is immediate child of provider base folder""" file_id = subfolder_provider_fixtures['file_id'] @@ -200,24 +179,19 @@ async def test_validate_v1_path_file_is_child(self, subfolder_provider, file_path = '/{}'.format(file_id) file_name = '/{}'.format(file_metadata['name']) try: - wb_path_v1 = await subfolder_provider.validate_v1_path(file_path) + wb_path = await subfolder_provider.validate_path(file_path) except Exception as exc: pytest.fail(str(exc)) - assert str(wb_path_v1) == file_name - assert wb_path_v1.identifier == file_id - - wb_path_v0 = await subfolder_provider.validate_path(file_path) - assert str(wb_path_v0) == file_name - - assert wb_path_v1 == wb_path_v0 + assert str(wb_path) == file_name + assert wb_path.identifier == file_id with pytest.raises(exceptions.NotFoundError) as exc: - await subfolder_provider.validate_v1_path(file_path + '/') + await subfolder_provider.validate_path(file_path + '/') @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_validate_v1_path_file_is_grandchild(self, subfolder_provider, + async def test_validate_path_file_is_grandchild(self, subfolder_provider, subfolder_provider_fixtures): """file is *not* immediate child of provider base folder""" subfile_id = subfolder_provider_fixtures['subfile_id'] @@ -235,19 +209,15 @@ async def test_validate_v1_path_file_is_grandchild(self, subfolder_provider, subfile_name = '/{}/{}'.format(subfolder_provider_fixtures['folder_metadata']['name'], subfile_metadata['name']) try: - wb_path_v1 = await subfolder_provider.validate_v1_path(subfile_path) + wb_path = await subfolder_provider.validate_path(subfile_path) except Exception as exc: pytest.fail(str(exc)) - assert str(wb_path_v1) == subfile_name - - wb_path_v0 = await subfolder_provider.validate_path(subfile_path) - assert str(wb_path_v0) == subfile_name + assert str(wb_path) == subfile_name - assert wb_path_v1 == wb_path_v0 @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_validate_v1_path_file_is_outside_root(self, subfolder_provider, + async def test_validate_path_file_is_outside_root(self, subfolder_provider, subfolder_provider_fixtures): """file is outside of the base storage root""" file_id = subfolder_provider_fixtures['outside_file_id'] @@ -263,7 +233,7 @@ async def test_validate_v1_path_file_is_outside_root(self, subfolder_provider, file_path = '/{}'.format(file_id) with pytest.raises(exceptions.NotFoundError) as exc: - await subfolder_provider.validate_v1_path(file_path) + await subfolder_provider.validate_path(file_path) with pytest.raises(exceptions.NotFoundError) as exc: await subfolder_provider.validate_path(file_path) diff --git a/tests/providers/osfstorage/fixtures.py b/tests/providers/osfstorage/fixtures.py index da2f79cd9..feb5573f0 100644 --- a/tests/providers/osfstorage/fixtures.py +++ b/tests/providers/osfstorage/fixtures.py @@ -172,7 +172,7 @@ def provider_and_mock(monkeypatch, auth, credentials, 
settings): mock_provider.upload = utils.MockCoroutine() mock_provider.download = utils.MockCoroutine() mock_provider.metadata = utils.MockCoroutine() - mock_provider.validate_v1_path = utils.MockCoroutine() + mock_provider.validate_path = utils.MockCoroutine() mock_provider._children_metadata = utils.MockCoroutine() mock_make_provider = mock.Mock(return_value=mock_provider) @@ -190,7 +190,7 @@ def provider_and_mock2(monkeypatch, auth, credentials, settings): mock_provider.upload = utils.MockCoroutine() mock_provider.download = utils.MockCoroutine() mock_provider.metadata = utils.MockCoroutine() - mock_provider.validate_v1_path = utils.MockCoroutine() + mock_provider.validate_path = utils.MockCoroutine() mock_provider._children_metadata = utils.MockCoroutine() mock_make_provider = mock.Mock(return_value=mock_provider) diff --git a/tests/providers/osfstorage/test_provider.py b/tests/providers/osfstorage/test_provider.py index d5b0b9748..054547ddd 100644 --- a/tests/providers/osfstorage/test_provider.py +++ b/tests/providers/osfstorage/test_provider.py @@ -294,7 +294,7 @@ async def test_intra_copy_folder(self, provider_and_mock, provider_and_mock2, dest_provider, dest_mock = provider_and_mock2 dest_mock.nid = 'abcde' dest_mock._children_metadata = utils.MockCoroutine(return_value=folder_children_metadata) - dest_mock.validate_v1_path = utils.MockCoroutine( + dest_mock.validate_path = utils.MockCoroutine( return_value=WaterButlerPath('/folder1/', _ids=('rootId', 'folder1')) ) @@ -322,10 +322,10 @@ async def test_intra_copy_folder(self, provider_and_mock, provider_and_mock2, assert isinstance(folder_meta, OsfStorageFolderMetadata) assert len(folder_meta.children) == 4 dest_mock._children_metadata.assert_called_once_with(WaterButlerPath('/folder1/')) - assert dest_mock.validate_v1_path.call_count == 1 + assert dest_mock.validate_path.call_count == 1 src_mock._children_metadata.assert_not_called() - src_mock.validate_v1_path.assert_not_called() + src_mock.validate_path.assert_not_called() @pytest.mark.asyncio @@ -368,7 +368,7 @@ async def test_intra_copy_file_overwrite(self, provider_and_mock, provider_and_m dest_provider, dest_mock = provider_and_mock2 dest_mock.nid = 'abcde' - dest_mock.validate_v1_path = utils.MockCoroutine( + dest_mock.validate_path = utils.MockCoroutine( return_value=WaterButlerPath('/file', _ids=('rootId', 'fileId')) ) @@ -411,7 +411,7 @@ async def test_intra_move_folder(self, provider_and_mock, provider_and_mock2, dest_provider, dest_mock = provider_and_mock2 dest_mock.nid = 'abcde' dest_mock._children_metadata = utils.MockCoroutine(return_value=folder_children_metadata) - dest_mock.validate_v1_path = utils.MockCoroutine( + dest_mock.validate_path = utils.MockCoroutine( return_value=WaterButlerPath('/folder1/', _ids=('rootId', 'folder1')) ) @@ -438,10 +438,10 @@ async def test_intra_move_folder(self, provider_and_mock, provider_and_mock2, assert isinstance(folder_meta, OsfStorageFolderMetadata) assert len(folder_meta.children) == 4 dest_mock._children_metadata.assert_called_once_with(WaterButlerPath('/folder1/')) - assert dest_mock.validate_v1_path.call_count == 1 + assert dest_mock.validate_path.call_count == 1 src_mock._children_metadata.assert_not_called() - src_mock.validate_v1_path.assert_not_called() + src_mock.validate_path.assert_not_called() @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -519,7 +519,6 @@ class TestValidatePath: @pytest.mark.aiohttpretty async def test_validate_path_root(self, provider, root_path, mock_time): assert root_path == await 
provider.validate_path('/') - assert root_path == await provider.validate_v1_path('/') @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -530,16 +529,14 @@ async def test_validate_path_file(self, provider, file_lineage, mock_time): aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=file_lineage) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + file_id + '/') + await provider.validate_path('/' + file_id + '/') assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + file_id) - wb_path_v1 = await provider.validate_v1_path('/' + file_id) + wb_path = await provider.validate_path('/' + file_id) expected = WaterButlerPath('/doc.rst') - assert wb_path_v0 == expected - assert wb_path_v1 == expected + assert wb_path == expected @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -550,14 +547,10 @@ async def test_validate_path_folder(self, provider, folder_lineage, mock_time): aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=folder_lineage) with pytest.raises(exceptions.NotFoundError): - await provider.validate_v1_path('/' + folder_id) + await provider.validate_path('/' + folder_id) - wb_path_v0 = await provider.validate_path('/' + folder_id) - wb_path_v1 = await provider.validate_v1_path('/' + folder_id + '/') - - expected = WaterButlerPath('/New Folder/') - assert wb_path_v0 == expected - assert wb_path_v1 == expected + wb_path = await provider.validate_path('/' + folder_id + '/') + assert wb_path == WaterButlerPath('/New Folder/') @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -568,11 +561,9 @@ async def test_validate_path_404s(self, provider, file_lineage, mock_time): aiohttpretty.register_json_uri('GET', url, params=params, status=404, body=file_lineage) with pytest.raises(exceptions.UnhandledProviderError): - await provider.validate_v1_path('/' + file_id) + await provider.validate_path('/' + file_id) - wb_path_v0 = await provider.validate_path('/' + file_id) - assert wb_path_v0 == WaterButlerPath(file_lineage['data'][0]['path'], prepend=None) @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -613,10 +604,10 @@ async def test_validate_path_nested(self, provider, file_lineage, folder_lineage url, params = build_signed_url_without_auth(provider, 'GET', 'New%20Folder', 'lineage') aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=folder_lineage) - wb_path_v0 = await provider.validate_path('New Folder/' + file_id) + wb_path = await provider.validate_path('New Folder/' + file_id) - assert len(wb_path_v0._parts) == 3 - assert wb_path_v0.name == '59a9b628b7d1c903ab5a8f52' + assert len(wb_path._parts) == 3 + assert wb_path.name == '59a9b628b7d1c903ab5a8f52' class TestUploads: diff --git a/tests/providers/owncloud/test_provider.py b/tests/providers/owncloud/test_provider.py index d83a33479..9aa4497b4 100644 --- a/tests/providers/owncloud/test_provider.py +++ b/tests/providers/owncloud/test_provider.py @@ -67,27 +67,23 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_root(self, provider): - assert WaterButlerPath('/', prepend=provider.folder) == await provider.validate_v1_path('/') + async def test_validate_path_root(self, provider): + assert WaterButlerPath('/', prepend=provider.folder) == await provider.validate_path('/') @pytest.mark.asyncio
@pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider, file_metadata): + async def test_validate_path_file(self, provider, file_metadata): path = WaterButlerPath('/triangles.txt', prepend=provider.folder) url = provider._webdav_url_ + path.full_path aiohttpretty.register_uri('PROPFIND', url, body=file_metadata, auto_length=True, status=207) try: - wb_path_v1 = await provider.validate_v1_path('/triangles.txt') + wb_path = await provider.validate_path('/triangles.txt') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await provider.validate_path('/triangles.txt') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider, folder_metadata): + async def test_validate_path_folder(self, provider, folder_metadata): path = WaterButlerPath('/myfolder/', prepend=provider.folder) url = provider._webdav_url_ + path.full_path aiohttpretty.register_uri('PROPFIND', @@ -96,14 +92,10 @@ async def test_validate_v1_path_folder(self, provider, folder_metadata): auto_length=True, status=207) try: - wb_path_v1 = await provider.validate_v1_path('/myfolder/') + wb_path = await provider.validate_path('/myfolder/') except Exception as exc: pytest.fail(str(exc)) - wb_path_v0 = await provider.validate_path('/myfolder/') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_unparsable_dav_response(self, provider, file_metadata_unparsable_response): @@ -116,16 +108,11 @@ async def test_unparsable_dav_response(self, provider, file_metadata_unparsable_ status=207) with pytest.raises(exceptions.NotFoundError): - await provider.validate_v1_path('/triangles.txt') - - try: await provider.validate_path('/triangles.txt') - except Exception as exc: - pytest.fail(str(exc)) @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_v1_own_cloud_404(self, provider, file_metadata_unparsable_response): + async def test_own_cloud_404(self, provider, file_metadata_unparsable_response): path = WaterButlerPath('/triangles.txt', prepend=provider.folder) url = provider._webdav_url_ + path.full_path aiohttpretty.register_uri('PROPFIND', @@ -135,7 +122,7 @@ async def test_v1_own_cloud_404(self, provider, file_metadata_unparsable_respons status=404) with pytest.raises(exceptions.NotFoundError): - await provider.validate_v1_path('/triangles.txt') + await provider.validate_path('/triangles.txt') @pytest.mark.asyncio @pytest.mark.aiohttpretty @@ -149,7 +136,7 @@ async def test_response_different_of_kind_than_path(self, provider, folder_metad status=207) with pytest.raises(exceptions.NotFoundError): - await provider.validate_v1_path('/triangles.txt') + await provider.validate_path('/triangles.txt') class TestCRUD: diff --git a/tests/providers/s3/test_provider.py b/tests/providers/s3/test_provider.py index 210b32791..0f034496a 100644 --- a/tests/providers/s3/test_provider.py +++ b/tests/providers/s3/test_provider.py @@ -208,7 +208,7 @@ class TestValidatePath: @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_file(self, provider, file_header_metadata, mock_time): + async def test_validate_path_file(self, provider, file_header_metadata, mock_time): file_path = 'foobah' params = {'prefix': '/' + file_path + '/', 'delimiter': '/'} @@ -217,25 +217,21 @@ async def test_validate_v1_path_file(self, provider, file_header_metadata, mock_ aiohttpretty.register_uri('HEAD', good_metadata_url, headers=file_header_metadata) aiohttpretty.register_uri('GET', 
bad_metadata_url, params=params, status=404) - assert WaterButlerPath('/') == await provider.validate_v1_path('/') + assert WaterButlerPath('/') == await provider.validate_path('/') try: - wb_path_v1 = await provider.validate_v1_path('/' + file_path) + wb_path = await provider.validate_path('/' + file_path) except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + file_path + '/') + await provider.validate_path('/' + file_path + '/') assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + file_path) - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio @pytest.mark.aiohttpretty - async def test_validate_v1_path_folder(self, provider, folder_metadata, mock_time): + async def test_validate_path_folder(self, provider, folder_metadata, mock_time): folder_path = 'Photos' params = {'prefix': '/' + folder_path + '/', 'delimiter': '/'} @@ -248,19 +244,15 @@ async def test_validate_v1_path_folder(self, provider, folder_metadata, mock_tim aiohttpretty.register_uri('HEAD', bad_metadata_url, status=404) try: - wb_path_v1 = await provider.validate_v1_path('/' + folder_path + '/') + wb_path = await provider.validate_path('/' + folder_path + '/') except Exception as exc: pytest.fail(str(exc)) with pytest.raises(exceptions.NotFoundError) as exc: - await provider.validate_v1_path('/' + folder_path) + await provider.validate_path('/' + folder_path) assert exc.value.code == client.NOT_FOUND - wb_path_v0 = await provider.validate_path('/' + folder_path + '/') - - assert wb_path_v1 == wb_path_v0 - @pytest.mark.asyncio async def test_normal_name(self, provider, mock_time): path = await provider.validate_path('/this/is/a/path.txt') diff --git a/tests/utils.py b/tests/utils.py index 0f41c8d1c..e8ace351b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -94,7 +94,6 @@ class MockProvider(provider.BaseProvider): upload = None download = None metadata = None - validate_v1_path = None validate_path = None revalidate_path = None can_duplicate_names = True @@ -109,8 +108,6 @@ def __init__(self, auth=None, creds=None, settings=None): self.metadata = MockCoroutine() self.revalidate_path = MockCoroutine( side_effect=lambda base, path, *args, **kwargs: base.child(path, *args, **kwargs)) - self.validate_v1_path = MockCoroutine( - side_effect=lambda path, **kwargs: WaterButlerPath(path, **kwargs)) self.validate_path = MockCoroutine( side_effect=lambda path, **kwargs: WaterButlerPath(path, **kwargs)) @@ -119,9 +116,6 @@ class MockProvider1(provider.BaseProvider): NAME = 'MockProvider1' - async def validate_v1_path(self, path, **kwargs): - return await self.validate_path(path, **kwargs) - async def validate_path(self, path, **kwargs): return WaterButlerPath(path) diff --git a/waterbutler/core/metadata.py b/waterbutler/core/metadata.py index b6788c59d..1c1e6044b 100644 --- a/waterbutler/core/metadata.py +++ b/waterbutler/core/metadata.py @@ -33,10 +33,6 @@ def __init__(self, raw: dict) -> None: def serialized(self) -> dict: """Returns a dict of primitives suitable for serializing into JSON. - .. note:: - - This method determines the output of API v0 and v1. 
- :rtype: dict """ return { diff --git a/waterbutler/core/provider.py b/waterbutler/core/provider.py index 84e44d8dd..55ba09d08 100644 --- a/waterbutler/core/provider.py +++ b/waterbutler/core/provider.py @@ -630,40 +630,17 @@ async def metadata(self, path: wb_path.WaterButlerPath, **kwargs) \ raise NotImplementedError @abc.abstractmethod - async def validate_v1_path(self, path: str, **kwargs) -> wb_path.WaterButlerPath: - """API v1 requires that requests against folder endpoints always end with a slash, and + async def validate_path(self, path: str, **kwargs) -> wb_path.WaterButlerPath: + """The API requires that requests against folder endpoints always end with a slash, and requests against files never end with a slash. This method checks the provider's metadata for the given id and throws a 404 Not Found if the implicit and explicit types don't - match. This method duplicates the logic in the provider's validate_path method, but - validate_path must currently accomodate v0 AND v1 semantics. After v0's retirement, this - method can replace validate_path. + match. ``path`` is the string in the url after the provider name and refers to the entity to be - acted on. For v1, this must *always exist*. If it does not, ``validate_v1_path`` should - return a 404. Creating a new file in v1 is done by making a PUT request against the parent + acted on. This must *always exist*. If it does not, ``validate_path`` should + return a 404. Creating a new file is done by making a PUT request against the parent folder and specifying the file name as a query parameter. If a user attempts to create a - file by PUTting to its inferred path, validate_v1_path should reject this request with a 404. - - :param path: ( :class:`str` ) user-supplied path to validate - :rtype: :class:`.WaterButlerPath` - :raises: :class:`.NotFoundError` - """ - raise NotImplementedError - - @abc.abstractmethod - async def validate_path(self, path: str, **kwargs) -> wb_path.WaterButlerPath: - """Validates paths passed in via the v0 API. v0 paths are much less strict than v1 paths. - They may represent things that exist or something that should be created. As such, the goal - of ``validate_path`` is to split the path into its component parts and attempt to determine - the ID of each part on the external provider. For instance, if the ``googledrive`` provider - receives a path of ``/foo/bar/baz.txt``, it will split those into ``/``, ``foo/``, ``bar/``, - and ``baz.txt``, and query Google Drive for the ID of each. ``validate_path`` then builds a - WaterButlerPath object with an ID, name tuple for each path part. The last part is - permitted to not have an ID, since it may represent a file that has not yet been created. - All other parts should have an ID. - - The WaterButler v0 API is deprecated and will be removed in a future release. At that time - this method will be obsolete and will be removed from all providers. + file by PUTting to its inferred path, validate_path should reject this request with a 404. 
:param path: ( :class:`str` ) user-supplied path to validate :rtype: :class:`.WaterButlerPath` diff --git a/waterbutler/providers/bitbucket/provider.py b/waterbutler/providers/bitbucket/provider.py index 96469af05..27a282595 100644 --- a/waterbutler/providers/bitbucket/provider.py +++ b/waterbutler/providers/bitbucket/provider.py @@ -61,7 +61,9 @@ def bitbucket_path_to_name(file_path: str, folder_path: str) -> str: def default_headers(self) -> dict: return {'Authorization': 'Bearer {}'.format(self.token)} - async def validate_v1_path(self, path: str, **kwargs) -> BitbucketPath: + async def validate_path(self, path: str, **kwargs) -> BitbucketPath: + """Validate a path + """ commit_sha = kwargs.get('commitSha') branch_name = kwargs.get('branch') @@ -105,33 +107,6 @@ async def validate_v1_path(self, path: str, **kwargs) -> BitbucketPath: return path_obj - async def validate_path(self, path: str, **kwargs) -> BitbucketPath: - commit_sha = kwargs.get('commitSha') - branch_name = kwargs.get('branch') - - # revision query param could be commit sha OR branch - # take a guess which one it will be. - revision = kwargs.get('revision', None) - if revision is not None: - try: - int(revision, 16) # is revision valid hex? - except (TypeError, ValueError): - branch_name = revision - else: - commit_sha = revision - - if not commit_sha and not branch_name: - branch_name = await self._fetch_default_branch() - - if path == '/': - return BitbucketPath(path, _ids=[(commit_sha, branch_name)]) - - path_obj = BitbucketPath(path) - for part in path_obj.parts: - part._id = (commit_sha, branch_name) - - return path_obj - def path_from_metadata(self, # type: ignore parent_path: BitbucketPath, metadata) -> BitbucketPath: @@ -152,7 +127,7 @@ async def metadata(self, path: BitbucketPath, **kwargs): # type: ignore async def revisions(self, path: BitbucketPath, **kwargs) -> list: # type: ignore """Returns a list of revisions for a file. As a VCS, Bitbucket doesn't have a single canonical history for a file. The revisions returned will be those of the file starting - with the reference supplied to or inferred by validate_v1_path(). + with the reference supplied to or inferred by validate_path(). 
https://confluence.atlassian.com/bitbucket/repository-resource-1-0-296095202.html#repositoryResource1.0-GETsthehistoryofafileinachangeset diff --git a/waterbutler/providers/box/provider.py b/waterbutler/providers/box/provider.py index 2cd6371a4..dfded11b1 100644 --- a/waterbutler/providers/box/provider.py +++ b/waterbutler/providers/box/provider.py @@ -48,7 +48,7 @@ def __init__(self, auth, credentials, settings): self.token = self.credentials['token'] # type: str self.folder = self.settings['folder'] # type: str - async def validate_v1_path(self, path: str, **kwargs) -> WaterButlerPath: + async def validate_path(self, path: str, **kwargs) -> WaterButlerPath: if path == '/': return WaterButlerPath('/', _ids=[self.folder]) @@ -86,70 +86,6 @@ async def validate_v1_path(self, path: str, **kwargs) -> WaterButlerPath: return WaterButlerPath('/'.join(names), _ids=ids, folder=path.endswith('/')) - async def validate_path(self, path: str, **kwargs) -> WaterButlerPath: - if path == '/': - return WaterButlerPath('/', _ids=[self.folder]) - - try: - obj_id, new_name = path.strip('/').split('/') - except ValueError: - obj_id, new_name = path.strip('/'), None - - if path.endswith('/') or new_name is not None: - files_or_folders = 'folders' - else: - files_or_folders = 'files' - - # Box file ids must be a valid base10 number - response = None - if obj_id.isdecimal(): - response = await self.make_request( - 'get', - self.build_url(files_or_folders, obj_id, fields='id,name,path_collection'), - expects=(200, 404, 405), - throws=exceptions.MetadataError, - ) - if response.status in (404, 405): - await response.release() - response = None - - if response is None: - if new_name is not None: - raise exceptions.MetadataError('Could not find {}'.format(path), code=404) - - return await self.revalidate_path( - WaterButlerPath('/', _ids=[self.folder]), - obj_id, - folder=path.endswith('/') - ) - else: - data = await response.json() # .json releases the response - - if self.folder != '0': # don't allow files outside project root - path_ids = [entry['id'] for entry in data['path_collection']['entries']] - if self.folder not in path_ids: - raise exceptions.NotFoundError(path) - - names, ids = zip(*[ - (x['name'], x['id']) - for x in - data['path_collection']['entries'] + [data] - ]) - - try: - names, ids = ('',) + names[ids.index(self.folder) + 1:], ids[ids.index(self.folder):] - except ValueError: - raise Exception # TODO - - is_folder = path.endswith('/') - - ret = WaterButlerPath('/'.join(names), _ids=ids, folder=is_folder) - - if new_name is not None: - return await self.revalidate_path(ret, new_name, folder=is_folder) - - return ret - async def revalidate_path(self, base: WaterButlerPath, path: str, folder: bool=None) -> WaterButlerPath: # TODO Research the search api endpoint diff --git a/waterbutler/providers/cloudfiles/provider.py b/waterbutler/providers/cloudfiles/provider.py index fa0248571..a61ed57f7 100644 --- a/waterbutler/providers/cloudfiles/provider.py +++ b/waterbutler/providers/cloudfiles/provider.py @@ -49,9 +49,6 @@ def __init__(self, auth, credentials, settings): self.use_public = self.settings.get('use_public', True) self.metrics.add('region', self.region) - async def validate_v1_path(self, path, **kwargs): - return await self.validate_path(path, **kwargs) - async def validate_path(self, path, **kwargs): return WaterButlerPath(path) diff --git a/waterbutler/providers/dataverse/provider.py b/waterbutler/providers/dataverse/provider.py index b9875f8b0..9e2193b72 100644 --- 
a/waterbutler/providers/dataverse/provider.py +++ b/waterbutler/providers/dataverse/provider.py @@ -61,18 +61,15 @@ def build_url(self, path, *segments, **query): def can_duplicate_names(self): return False - async def validate_v1_path(self, path, **kwargs): - if path != '/' and path.endswith('/'): - raise exceptions.NotFoundError(str(path)) - - return await self.validate_path(path, **kwargs) - async def validate_path(self, path, revision=None, **kwargs): """Ensure path is in configured dataset :param str path: The path to a file :param list metadata: List of file metadata from _get_data """ + if path != '/' and path.endswith('/'): + raise exceptions.NotFoundError(str(path)) + self.metrics.add('validate_path.revision', revision) if path == '/': wbpath = WaterButlerPath('/') diff --git a/waterbutler/providers/dropbox/provider.py b/waterbutler/providers/dropbox/provider.py index 5b9862515..569c6a080 100644 --- a/waterbutler/providers/dropbox/provider.py +++ b/waterbutler/providers/dropbox/provider.py @@ -121,7 +121,9 @@ def dropbox_conflict_error_handler(self, data: dict, error_path: str='') -> None raise pd_exceptions.DropboxNamingConflictError(error_path) raise pd_exceptions.DropboxUnhandledConflictError(str(data)) - async def validate_v1_path(self, path: str, **kwargs) -> WaterButlerPath: + async def validate_path(self, path: str, **kwargs) -> WaterButlerPath: + """Validate a path + """ if path == '/': return WaterButlerPath(path, prepend=self.folder) implicit_folder = path.endswith('/') @@ -135,9 +137,6 @@ async def validate_v1_path(self, path: str, **kwargs) -> WaterButlerPath: raise core_exceptions.NotFoundError(str(path)) return WaterButlerPath(path, prepend=self.folder) - async def validate_path(self, path: str, **kwargs) -> WaterButlerPath: - return WaterButlerPath(path, prepend=self.folder) - def can_duplicate_names(self) -> bool: return False diff --git a/waterbutler/providers/figshare/provider.py b/waterbutler/providers/figshare/provider.py index b5c8204b2..0628a360d 100644 --- a/waterbutler/providers/figshare/provider.py +++ b/waterbutler/providers/figshare/provider.py @@ -341,13 +341,13 @@ class FigshareProjectProvider(BaseFigshareProvider): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): """Take a string path from the url and attempt to map it to an entity within this project. If the entity is found, returns a FigsharePath object with the entity identifiers included. Otherwise throws a 404 Not Found. Will also assert that the entity type inferred from the path matches the type of the entity at that url. - :param str path: entity path from the v1 API + :param str path: entity path :rtype FigsharePath: """ if path == '/': @@ -403,86 +403,6 @@ async def validate_v1_path(self, path, **kwargs): raise exceptions.NotFoundError('This article is not configured as a folder defined_type. ' '{} not found.'.format(path)) - async def validate_path(self, path, **kwargs): - """Take a string path from the url and attempt to map it to an entity within this project. - If the entity is found, returns a FigsharePath object with the entity identifiers included. - Otherwise returns a FigsharePath with empty identifiers. - - :param str path: identifier_path URN as passed through the v0 API - :rtype FigsharePath: - - Quirks: - - * v0 may pass an identifier_path whose last part is a name and not an identifier, in the - case of file/folder creation calls. 
- - * validate_path validates parent and returns a FigsharePath as accurately as possible. - """ - if path == '/': - return FigsharePath('/', _ids=('', ), folder=True, is_public=False) - - path_parts = self._path_split(path) - if len(path_parts) not in (2, 3): - raise exceptions.InvalidPathError('{} is not a valid Figshare path.'.format(path)) - article_id = path_parts[1] - file_id = path_parts[2] if len(path_parts) == 3 else None - - articles = await self._get_all_articles() - - # TODO: need better way to get public/private - # This call's return value is currently busted at figshare for collections. Figshare always - # returns private-looking urls. - is_public = False - for item in articles: - if '/articles/' + article_id in item['url']: - article_name = item['title'] - if pd_settings.PRIVATE_IDENTIFIER not in item['url']: - is_public = True - - article_segments = (*self.root_path_parts, 'articles', article_id) - if file_id: - file_response = await self.make_request( - 'GET', - self.build_url(is_public, *article_segments, 'files', file_id), - expects=(200, 404, ), - ) - if file_response.status == 200: - file_response_json = await file_response.json() - file_name = file_response_json['name'] - return FigsharePath('/' + article_name + '/' + file_name, - _ids=(self.container_id, article_id, file_id), - folder=False, - is_public=is_public) - await file_response.release() - - article_response = await self.make_request( - 'GET', - self.build_url(is_public, *article_segments), - expects=(200, 404, ), - ) - if article_response.status == 200: - article_json = await article_response.json() - if article_json['defined_type'] in pd_settings.FOLDER_TYPES: - # Case of v0 file creation - if file_id: - ids = ('', article_id, '') - folder = False - path_urn = '/' + article_name + '/' + file_id - else: - ids = ('', article_id) - folder = True - path_urn = '/' + article_name + '/' - return FigsharePath(path_urn, _ids=ids, folder=folder, is_public=is_public) - else: - await article_response.release() - - if file_id: - # Catch for if neither file nor article exist - raise exceptions.NotFoundError(path) - - # Return for v0 folder creation - return FigsharePath(path, _ids=('', ''), folder=True, is_public=False) - async def revalidate_path(self, parent_path, child_name, folder): """Look for file or folder named ``child_name`` under ``parent_path``. If it finds a match, it returns a FigsharePath object with the appropriate ids set. Otherwise, it returns a @@ -804,13 +724,13 @@ class FigshareArticleProvider(BaseFigshareProvider): def __init__(self, auth, credentials, settings, child=False): super().__init__(auth, credentials, settings) - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): """Take a string path from the url and attempt to map it to an entity within this article. If the entity is found, returns a FigsharePath object with the entity identifiers included. Otherwise throws a 404 Not Found. Will also assert that the entity type inferred from the path matches the type of the entity at that url. - :param str path: entity path from the v1 API + :param str path: entity path from the API :rtype FigsharePath: """ if path == '/': @@ -831,44 +751,6 @@ async def validate_v1_path(self, path, **kwargs): return FigsharePath('/' + file_json['name'], _ids=('', file_id), folder=False, is_public=False) - async def validate_path(self, path, **kwargs): - """Take a string path from the url and attempt to map it to an entity within this article. 
- If the entity is found, returns a FigsharePath object with the entity identifiers included. - Otherwise returns a FigsharePath with empty identifiers. - - :param str path: identifier path URN as passed through the v0 API - :rtype FigsharePath: - - Quirks: - - * v0 may pass an identifier_path whose last part is a name and not an identifier, in the - case of file/folder creation calls. - - * validate_path validates parent and returns a FigsharePath as accurately as possible. - """ - if path == '/': - return FigsharePath('/', _ids=('', ), folder=True, is_public=False) - - path_parts = self._path_split(path) - if len(path_parts) != 2: - raise exceptions.InvalidPathError('{} is not a valid Figshare path.'.format(path)) - - file_id = path_parts[1] - - resp = await self.make_request( - 'GET', - self.build_url(False, *self.root_path_parts, 'files', file_id), - expects=(200, 404, ), - ) - if resp.status == 200: - file_json = await resp.json() - file_name = file_json['name'] - return FigsharePath('/' + file_name, _ids=('', file_id), folder=False, is_public=False) - - # catch for create file in article root - await resp.release() - return FigsharePath('/' + file_id, _ids=('', ''), folder=False, is_public=False) - async def revalidate_path(self, parent_path, child_name, folder: bool=False): """Attempt to get child's id and return FigsharePath of child. diff --git a/waterbutler/providers/filesystem/provider.py b/waterbutler/providers/filesystem/provider.py index 98051e33e..43b574c45 100644 --- a/waterbutler/providers/filesystem/provider.py +++ b/waterbutler/providers/filesystem/provider.py @@ -29,7 +29,7 @@ def __init__(self, auth, credentials, settings): self.folder = self.settings['folder'] os.makedirs(self.folder, exist_ok=True) - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): if not os.path.exists(self.folder + path): raise exceptions.NotFoundError(str(path)) @@ -40,9 +40,6 @@ async def validate_v1_path(self, path, **kwargs): return WaterButlerPath(path, prepend=self.folder) - async def validate_path(self, path, **kwargs): - return WaterButlerPath(path, prepend=self.folder) - def can_duplicate_names(self): return False diff --git a/waterbutler/providers/github/provider.py b/waterbutler/providers/github/provider.py index 3a51a081b..0acd3eed0 100644 --- a/waterbutler/providers/github/provider.py +++ b/waterbutler/providers/github/provider.py @@ -66,7 +66,7 @@ def __init__(self, auth, credentials, settings): self.repo = self.settings['repo'] self.metrics.add('repo', {'repo': self.repo, 'owner': self.owner}) - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): if not getattr(self, '_repo', None): self._repo = await self._fetch_repo() self.default_branch = self._repo['default_branch'] @@ -103,35 +103,6 @@ async def validate_v1_path(self, path, **kwargs): return path - async def validate_path(self, path, **kwargs): - if not getattr(self, '_repo', None): - self._repo = await self._fetch_repo() - self.default_branch = self._repo['default_branch'] - - path = GitHubPath(path) - branch_ref, ref_from = None, None - if kwargs.get('ref'): - branch_ref = kwargs.get('ref') - ref_from = 'query_ref' - elif kwargs.get('branch'): - branch_ref = kwargs.get('branch') - ref_from = 'query_branch' - else: - branch_ref = self.default_branch - ref_from = 'default_branch' - if isinstance(branch_ref, list): - raise exceptions.InvalidParameters('Only one ref or branch may be given.') - self.metrics.add('branch_ref_from', 
ref_from) - - for part in path.parts: - part._id = (branch_ref, None) - - # TODO Validate that filesha is a valid sha - path.parts[-1]._id = (branch_ref, kwargs.get('fileSha')) - self.metrics.add('file_sha_given', True if kwargs.get('fileSha') else False) - - return path - async def revalidate_path(self, base, path, folder=False): return base.child(path, _id=((base.branch_ref, None)), folder=folder) diff --git a/waterbutler/providers/gitlab/provider.py b/waterbutler/providers/gitlab/provider.py index 277dd0ed2..5299d333d 100644 --- a/waterbutler/providers/gitlab/provider.py +++ b/waterbutler/providers/gitlab/provider.py @@ -69,42 +69,10 @@ def __init__(self, auth, credentials, settings): def default_headers(self) -> dict: return {'PRIVATE-TOKEN': str(self.token)} - async def validate_v1_path(self, path: str, **kwargs) -> GitLabPath: - """Turns the string ``path`` into a `GitLabPath` object. See `validate_path` for details. - This method does much the same as `validate_path`, but does two extra validation steps. - First it checks to see if the object identified by ``path`` already exists in the repo, - throwing a 404 if not. It then checks to make sure the v1 file/folder semantics are - respected. - - :param str path: The path to a file/folder - :rtype: GitLabPath - :raises: :class:`waterbutler.core.exceptions.NotFoundError` - """ - - gl_path = await self.validate_path(path, **kwargs) - - if gl_path.commit_sha is None: - commit_sha = await self._get_commit_sha_for_branch(gl_path.branch_name) - gl_path.set_commit_sha(commit_sha) - - if gl_path.is_root: - return gl_path - - data = await self._fetch_tree_contents(gl_path.parent) - - type_needed = 'tree' if gl_path.is_dir else 'blob' - found = [x for x in data if x['type'] == type_needed and x['name'] == gl_path.name] - - if not found: - raise exceptions.NotFoundError(str(gl_path)) - - return gl_path - async def validate_path(self, path: str, **kwargs) -> GitLabPath: """Turn the string ``path`` into a `GitLabPath` object. Will infer the branch/commit information from the query params or from the default branch for the repo if those are - not provided. Does no validation to ensure that the entity described by ``path`` actually - exists. + not provided. Valid kwargs are ``commitSha``, ``branch``, and ``revision``. If ``revision`` is given, its value will be assigned to the commit SHA if it is a valid base-16 number, or branch @@ -113,8 +81,16 @@ async def validate_path(self, path: str, **kwargs) -> GitLabPath: effort is made to ensure that they point to the same thing. `GitLabPath` objects default to commit SHAs over branch names when building API calls, as a commit SHA is more specific. + Then we check to see if the object identified by ``path`` already exists in the repo, + throwing a 404 if not. It then checks to make sure the file/folder semantics are + respected. 
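# A minimal sketch of the revision-disambiguation rule described above: a
# `revision` query parameter is treated as a commit SHA when it parses as
# base-16, and as a branch name otherwise. Names here are illustrative; the
# providers inline this logic rather than calling a shared helper.
def split_revision(revision, commit_sha=None, branch_name=None):
    """Guess whether ``revision`` names a commit SHA or a branch."""
    if revision is not None:
        try:
            int(revision, 16)           # valid hex -> assume commit SHA
        except (TypeError, ValueError):
            branch_name = revision      # not hex -> assume branch name
        else:
            commit_sha = revision
    return commit_sha, branch_name

# split_revision('develop')    -> (None, 'develop')
# split_revision('0badc0ffee') -> ('0badc0ffee', None)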
+ :param str path: The path to a file :rtype: GitLabPath + + :param str path: The path to a file/folder + :rtype: GitLabPath + :raises: :class:`waterbutler.core.exceptions.NotFoundError` """ commit_sha = kwargs.get('commitSha') branch_name = kwargs.get('branch') @@ -140,6 +116,21 @@ async def validate_path(self, path: str, **kwargs) -> GitLabPath: for part in gl_path.parts: part._id = (commit_sha, branch_name) + if gl_path.commit_sha is None: + commit_sha = await self._get_commit_sha_for_branch(gl_path.branch_name) + gl_path.set_commit_sha(commit_sha) + + if gl_path.is_root: + return gl_path + + data = await self._fetch_tree_contents(gl_path.parent) + + type_needed = 'tree' if gl_path.is_dir else 'blob' + found = [x for x in data if x['type'] == type_needed and x['name'] == gl_path.name] + + if not found: + raise exceptions.NotFoundError(str(gl_path)) + return gl_path def path_from_metadata(self, # type: ignore diff --git a/waterbutler/providers/googlecloud/provider.py b/waterbutler/providers/googlecloud/provider.py index db10e8ad3..43f03286d 100644 --- a/waterbutler/providers/googlecloud/provider.py +++ b/waterbutler/providers/googlecloud/provider.py @@ -98,9 +98,6 @@ def __init__(self, auth: dict, credentials: dict, settings: dict) -> None: message='Invalid or mal-formed service account credentials: {}'.format(str(exc)) ) - async def validate_v1_path(self, path: str, **kwargs) -> WaterButlerPath: - return await self.validate_path(path) - async def validate_path(self, path: str, **kwargs) -> WaterButlerPath: return WaterButlerPath(path) diff --git a/waterbutler/providers/googledrive/provider.py b/waterbutler/providers/googledrive/provider.py index 2127fad84..6d1b6daf3 100644 --- a/waterbutler/providers/googledrive/provider.py +++ b/waterbutler/providers/googledrive/provider.py @@ -83,7 +83,7 @@ def __init__(self, auth: dict, credentials: dict, settings: dict) -> None: self.token = self.credentials['token'] self.folder = self.settings['folder'] - async def validate_v1_path(self, path: str, **kwargs) -> GoogleDrivePath: + async def validate_path(self, path: str, **kwargs) -> GoogleDrivePath: if path == '/': return GoogleDrivePath('/', _ids=[self.folder['id']], folder=True) @@ -96,14 +96,6 @@ async def validate_v1_path(self, path: str, **kwargs) -> GoogleDrivePath: names, ids = zip(*[(parse.quote(x['title'], safe=''), x['id']) for x in parts]) return GoogleDrivePath('/'.join(names), _ids=ids, folder='folder' in parts[-1]['mimeType']) - async def validate_path(self, path: str, **kwargs) -> GoogleDrivePath: - if path == '/': - return GoogleDrivePath('/', _ids=[self.folder['id']], folder=True) - - parts = await self._resolve_path_to_ids(path) - names, ids = zip(*[(parse.quote(x['title'], safe=''), x['id']) for x in parts]) - return GoogleDrivePath('/'.join(names), _ids=ids, folder='folder' in parts[-1]['mimeType']) - async def revalidate_path(self, base: WaterButlerPath, name: str, diff --git a/waterbutler/providers/onedrive/provider.py b/waterbutler/providers/onedrive/provider.py index 409bd67e0..4c1e5b223 100644 --- a/waterbutler/providers/onedrive/provider.py +++ b/waterbutler/providers/onedrive/provider.py @@ -61,16 +61,12 @@ class OneDriveProvider(provider.BaseProvider): dont_escape_these = ",;[]'$#@&!~()+-_=:/" - # ========== __init__ ========== - def __init__(self, auth, credentials, settings): logger.debug('__init__ auth::{} settings::{}'.format(auth, settings)) super().__init__(auth, credentials, settings) self.token = self.credentials['token'] self.folder = self.settings['folder'] - # 
========== properties ========== - @property def default_headers(self) -> dict: """Set Authorization header with access token from auth provider. @@ -79,12 +75,10 @@ def default_headers(self) -> dict: """ return {'Authorization': 'bearer {}'.format(self.token)} - # ========== methods ========== - - async def validate_v1_path(self, path: str, **kwargs) -> OneDrivePath: + async def validate_path(self, path: str, **kwargs) -> OneDrivePath: """validate that ``path`` exists and matches the implicit semantics. - See `provider.BaseProvider.validate_v1_path` for more. + See `provider.BaseProvider.validate_path` for more. :param str path: A string representing the requested path. This will be everthing after the provider name in the url. @@ -93,8 +87,7 @@ async def validate_v1_path(self, path: str, **kwargs) -> OneDrivePath: :rtype: OneDrivePath :return: a OneDrivePath object representing the new path. """ - logger.debug('validate_v1_path self::{} path::{} kwargs::{}'.format(repr(self), - path, kwargs)) + logger.debug('validate_path self::{} path::{} kwargs::{}'.format(repr(self), path, kwargs)) if path == '/': return OneDrivePath(path, _ids=[self.folder]) @@ -104,61 +97,15 @@ async def validate_v1_path(self, path: str, **kwargs) -> OneDrivePath: expects=(200, ), throws=exceptions.MetadataError ) - logger.debug('validate_v1_path resp::{}'.format(repr(resp))) + logger.debug('validate_path resp::{}'.format(repr(resp))) data = await resp.json() - logger.debug('validate_v1_path data::{}'.format(json.dumps(data))) + logger.debug('validate_path data::{}'.format(json.dumps(data))) implicit_folder = path.endswith('/') explicit_folder = data.get('folder', None) is not None if implicit_folder != explicit_folder: raise exceptions.NotFoundError(path) - # If base folder isn't root or the immediate parent of the requested path, then we need - # to verify that it actually is an ancestor of path. Otherwise, a malicious user could - # try to get access to a file outside of the configured root. 
- base_folder = None - if self.folder != 'root' and self.folder != data['parentReference']['id']: - base_folder_resp = await self.make_request( - 'GET', self._build_item_url(self.folder), - expects=(200, ), - throws=exceptions.MetadataError - ) - logger.debug('validate_v1_path base_folder_resp::{}'.format(repr(base_folder_resp))) - base_folder = await base_folder_resp.json() - logger.debug('validate_v1_path base_folder::{}'.format(json.dumps(base_folder))) - - base_full_path = urlparse.quote( - '{}/{}/'.format( - urlparse.unquote(base_folder['parentReference']['path']), - base_folder['name'] - ), - self.dont_escape_these - ) - - if not data['parentReference']['path'].startswith(base_full_path): - # the requested file is NOT a child of self.folder - raise exceptions.NotFoundError(path) - - od_path = OneDrivePath.new_from_response(data, self.folder, - base_folder_metadata=base_folder) - logger.debug('validate_v1_path od_path.parts::{}'.format(repr(od_path._parts))) - return od_path - - async def validate_path(self, path: str, **kwargs) -> OneDrivePath: - logger.debug('validate_path self::{} path::{} kwargs::{}'.format(repr(self), path, kwargs)) - - if path == '/': - return OneDrivePath(path, _ids=[self.folder]) - - resp = await self.make_request( - 'GET', self._build_item_url(path), - expects=(200, ), - throws=exceptions.MetadataError - ) - logger.debug('validate_path resp::{}'.format(repr(resp))) - data = await resp.json() - logger.debug('validate_path data::{}'.format(json.dumps(data))) - # If base folder isn't root or the immediate parent of the requested path, then we need # to verify that it actually is an ancestor of path. Otherwise, a malicious user could # try to get access to a file outside of the configured root. @@ -183,7 +130,7 @@ async def validate_path(self, path: str, **kwargs) -> OneDrivePath: if not data['parentReference']['path'].startswith(base_full_path): # the requested file is NOT a child of self.folder - raise exceptions.NotFoundError(path) # TESTME + raise exceptions.NotFoundError(path) od_path = OneDrivePath.new_from_response(data, self.folder, base_folder_metadata=base_folder) @@ -324,7 +271,7 @@ async def download(self, # type: ignore raise exceptions.UnexportableFileTypeError(str(path)) break else: - # TODO: we should be able to get the download url from validate_v1_path + # TODO: we should be able to get the download url from validate_path metadata_resp = await self.make_request( 'GET', self._build_drive_url(*path.api_identifier), diff --git a/waterbutler/providers/osfstorage/provider.py b/waterbutler/providers/osfstorage/provider.py index 188da5793..691a23671 100644 --- a/waterbutler/providers/osfstorage/provider.py +++ b/waterbutler/providers/osfstorage/provider.py @@ -45,6 +45,8 @@ class OSFStorageProvider(provider.BaseProvider): NAME = 'osfstorage' def __init__(self, auth, credentials, settings): + """Initialize the provider instance + """ super().__init__(auth, credentials, settings) self.nid = settings['nid'] self.root_id = settings['rootId'] @@ -57,7 +59,9 @@ def __init__(self, auth, credentials, settings): self.archive_settings = settings.get('archive') self.archive_credentials = credentials.get('archive') - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): + """Validate a path + """ if path == '/': return WaterButlerPath('/', _ids=[self.root_id], folder=True) @@ -79,37 +83,6 @@ async def validate_v1_path(self, path, **kwargs): return WaterButlerPath('/'.join(names), _ids=ids, folder=explicit_folder) - 
async def validate_path(self, path, **kwargs): - if path == '/': - return WaterButlerPath('/', _ids=[self.root_id], folder=True) - - ends_with_slash = path.endswith('/') - - try: - path, name = path.strip('/').split('/') - except ValueError: - path, name = path, None - - async with self.signed_request( - 'GET', - self.build_url(path, 'lineage'), - expects=(200, 404) - ) as resp: - - if resp.status == 404: - return WaterButlerPath(path, _ids=(self.root_id, None), folder=path.endswith('/')) - - data = await resp.json() - - is_folder = data['data'][0]['kind'] == 'folder' - names, ids = zip(*[(x['name'], x['id']) for x in reversed(data['data'])]) - if name is not None: - ids += (None, ) - names += (name, ) - is_folder = ends_with_slash - - return WaterButlerPath('/'.join(names), _ids=ids, folder=is_folder) - async def revalidate_path(self, base, path, folder=False): assert base.is_dir @@ -188,7 +161,7 @@ async def intra_move(self, dest_provider, src_path, dest_path): return OsfStorageFileMetadata(data, str(dest_path)), dest_path.identifier is None folder_meta = OsfStorageFolderMetadata(data, str(dest_path)) - dest_path = await dest_provider.validate_v1_path(data['path']) + dest_path = await dest_provider.validate_path(data['path']) folder_meta.children = await dest_provider._children_metadata(dest_path) return folder_meta, created @@ -220,7 +193,7 @@ async def intra_copy(self, dest_provider, src_path, dest_path): return OsfStorageFileMetadata(data, str(dest_path)), dest_path.identifier is None folder_meta = OsfStorageFolderMetadata(data, str(dest_path)) - dest_path = await dest_provider.validate_v1_path(data['path']) + dest_path = await dest_provider.validate_path(data['path']) folder_meta.children = await dest_provider._children_metadata(dest_path) return folder_meta, created diff --git a/waterbutler/providers/owncloud/provider.py b/waterbutler/providers/owncloud/provider.py index 29cc2b10e..3be1c2f77 100644 --- a/waterbutler/providers/owncloud/provider.py +++ b/waterbutler/providers/owncloud/provider.py @@ -73,7 +73,7 @@ def shares_storage_root(self, other): """ return super().shares_storage_root(other) and self.credentials == other.credentials - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): """Verifies that ``path`` exists and if so, returns a WaterButlerPath object that represents it. WebDAV returns 200 for a single file, 207 for a multipart (folder), and 404 for Does Not Exist. @@ -108,33 +108,6 @@ async def validate_v1_path(self, path, **kwargs): raise exceptions.NotFoundError(full_path.full_path) return full_path - async def validate_path(self, path, **kwargs): - """Similar to `validate_v1_path`, but will not throw a 404 if the path doesn't yet exist. - Instead, returns a WaterButlerPath object for the potential path (such as before uploads). 
- - :param str path: user-supplied path to validate - :return: WaterButlerPath object representing ``path`` - :rtype: :class:`waterbutler.core.path.WaterButlerPath` - """ - if path == '/': - return WaterButlerPath(path, prepend=self.folder) - full_path = WaterButlerPath(path, prepend=self.folder) - response = await self.make_request('PROPFIND', - self._webdav_url_ + full_path.full_path, - expects=(200, 207, 404), - throws=exceptions.MetadataError, - auth=self._auth, - connector=self.connector(), - ) - content = await response.content.read() - await response.release() - - try: - await utils.parse_dav_response(content, '/') - except exceptions.NotFoundError: - pass - return full_path - async def download(self, path, accept_url=False, range=None, **kwargs): """Creates a stream for downloading files from the remote host. If the metadata query for the file has no size metadata, downloads to memory. @@ -142,11 +115,13 @@ async def download(self, path, accept_url=False, range=None, **kwargs): :param waterbutler.core.path.WaterButlerPath path: user-supplied path to download :raises: `waterbutler.core.exceptions.DownloadError` """ - - self.metrics.add('download', { - 'got_accept_url': accept_url is False, - 'got_range': range is not None, - }) + self.metrics.add( + 'download', + { + 'got_accept_url': accept_url is False, + 'got_range': range is not None, + } + ) download_resp = await self.make_request( 'GET', self._webdav_url_ + path.full_path, diff --git a/waterbutler/providers/s3/provider.py b/waterbutler/providers/s3/provider.py index 514eeeaed..dd0b17382 100644 --- a/waterbutler/providers/s3/provider.py +++ b/waterbutler/providers/s3/provider.py @@ -64,7 +64,9 @@ def __init__(self, auth, credentials, settings): self.encrypt_uploads = self.settings.get('encrypt_uploads', False) self.region = None - async def validate_v1_path(self, path, **kwargs): + async def validate_path(self, path, **kwargs): + """Validate a path + """ await self._check_region() if path == '/': @@ -96,9 +98,6 @@ async def validate_v1_path(self, path, **kwargs): return WaterButlerPath(path) - async def validate_path(self, path, **kwargs): - return WaterButlerPath(path) - def can_duplicate_names(self): return True diff --git a/waterbutler/server/api/v1/provider/__init__.py b/waterbutler/server/api/v1/provider/__init__.py index dfc8787bf..af68a4ba4 100644 --- a/waterbutler/server/api/v1/provider/__init__.py +++ b/waterbutler/server/api/v1/provider/__init__.py @@ -64,7 +64,7 @@ async def prepare(self, *args, **kwargs): if method != 'post': self.auth = await auth_handler.get(self.resource, provider, self.request) self.provider = utils.make_provider(provider, self.auth['auth'], self.auth['credentials'], self.auth['settings']) - self.path = await self.provider.validate_v1_path(self.path, **self.arguments) + self.path = await self.provider.validate_path(self.path, **self.arguments) self.target_path = None diff --git a/waterbutler/server/api/v1/provider/movecopy.py b/waterbutler/server/api/v1/provider/movecopy.py index 4d868e276..7523fdc9b 100644 --- a/waterbutler/server/api/v1/provider/movecopy.py +++ b/waterbutler/server/api/v1/provider/movecopy.py @@ -95,7 +95,7 @@ async def move_or_copy(self): self.auth['credentials'], self.auth['settings'] ) - self.path = await self.provider.validate_v1_path(self.path, **self.arguments) + self.path = await self.provider.validate_path(self.path, **self.arguments) if auth_action == 'rename': # 'rename' implies the file/folder does not change location self.dest_auth = self.auth From 
7ad143c2bc3ecc462e624b46f4697f80784b80e7 Mon Sep 17 00:00:00 2001 From: Josh Bird Date: Thu, 16 Aug 2018 17:49:23 -0400 Subject: [PATCH 6/6] Update tests Some tests only tested validate_path, which has been removed, so these tests were removed. Other tests needed to be updated so that they were mocked correctly because validate_v1_path (now `validate_path`) makes network requests. --- tests/providers/bitbucket/test_provider.py | 2 +- tests/providers/box/test_provider.py | 23 +--- tests/providers/dropbox/test_provider.py | 89 ++++++++++----- tests/providers/figshare/test_provider.py | 68 +++++++---- tests/providers/filesystem/test_provider.py | 24 ++-- tests/providers/github/test_provider.py | 44 -------- tests/providers/osfstorage/test_provider.py | 118 +++++++++++++------- tests/providers/s3/test_provider.py | 27 ----- waterbutler/providers/gitlab/provider.py | 5 +- 9 files changed, 210 insertions(+), 190 deletions(-) diff --git a/tests/providers/bitbucket/test_provider.py b/tests/providers/bitbucket/test_provider.py index b0994208a..786d72521 100644 --- a/tests/providers/bitbucket/test_provider.py +++ b/tests/providers/bitbucket/test_provider.py @@ -138,7 +138,7 @@ async def test_validate_path_subfolder(self, provider): dir_listing_body = test_fixtures['subfolder_dir_listing'] base_commit = dir_listing_body['node'] - dir_listing_url = provider._build_repo_url('src', 'main-branch', 'subfolder') + '/' + dir_listing_url = provider._build_v1_repo_url('src', 'main-branch', 'subfolder') + '/' aiohttpretty.register_json_uri('GET', dir_listing_url, body=dir_listing_body) path = '/subfolder/.gitkeep' diff --git a/tests/providers/box/test_provider.py b/tests/providers/box/test_provider.py index 8227de39c..99a2d82dc 100644 --- a/tests/providers/box/test_provider.py +++ b/tests/providers/box/test_provider.py @@ -108,9 +108,12 @@ async def test_validate_path_folder(self, provider, root_provider_fixtures): good_url = provider.build_url('folders', folder_id, fields='id,name,path_collection') bad_url = provider.build_url('files', folder_id, fields='id,name,path_collection') - aiohttpretty.register_json_uri('get', good_url, - body=root_provider_fixtures['folder_object_metadata'], - status=200) + aiohttpretty.register_json_uri( + 'get', + good_url, + body=root_provider_fixtures['folder_object_metadata'], + status=200 + ) aiohttpretty.register_uri('get', bad_url, status=404) try: wb_path = await provider.validate_path('/' + folder_id + '/') @@ -140,20 +143,6 @@ async def test_validate_path_bad_path(self, provider): assert e.value.message == 'Could not retrieve file or directory /bulbasaur' assert e.value.code == 404 - @pytest.mark.asyncio - @pytest.mark.aiohttpretty - async def test_validate_path(self, provider, root_provider_fixtures): - provider.folder = '0' - folder_id = '0' - - good_url = provider.build_url('folders', folder_id, 'items', fields='id,name,type', limit=1000) - aiohttpretty.register_json_uri('GET', good_url, - body=root_provider_fixtures['revalidate_metadata'], - status=200) - - result = await provider.validate_path('/bulbasaur') - assert result == WaterButlerPath('/bulbasaur', folder=False) - class TestDownload: diff --git a/tests/providers/dropbox/test_provider.py b/tests/providers/dropbox/test_provider.py index 81de31dd3..e096ed0f6 100644 --- a/tests/providers/dropbox/test_provider.py +++ b/tests/providers/dropbox/test_provider.py @@ -8,26 +8,35 @@ from waterbutler.core import metadata as core_metadata from waterbutler.core import exceptions as core_exceptions -from 
waterbutler.providers.dropbox.metadata import (DropboxRevision, - DropboxFileMetadata, - DropboxFolderMetadata) -from waterbutler.providers.dropbox.exceptions import (DropboxNamingConflictError, - DropboxUnhandledConflictError) -from waterbutler.providers.dropbox.settings import CHUNK_SIZE, CONTIGUOUS_UPLOAD_SIZE_LIMIT +from waterbutler.providers.dropbox.metadata import ( + DropboxRevision, + DropboxFileMetadata, + DropboxFolderMetadata +) +from waterbutler.providers.dropbox.exceptions import ( + DropboxNamingConflictError, + DropboxUnhandledConflictError +) +from waterbutler.providers.dropbox.settings import ( + CHUNK_SIZE, + CONTIGUOUS_UPLOAD_SIZE_LIMIT +) from tests.utils import MockCoroutine -from tests.providers.dropbox.fixtures import (auth, - settings, - provider, - file_like, - credentials, - file_stream, - file_content, - other_provider, - error_fixtures, - other_credentials, - provider_fixtures, - revision_fixtures,) +from tests.providers.dropbox.fixtures import ( + auth, + settings, + provider, + file_like, + credentials, + file_stream, + file_content, + other_provider, + error_fixtures, + other_credentials, + provider_fixtures, + revision_fixtures, +) def build_folder_metadata_data(path): @@ -86,8 +95,18 @@ async def test_validate_path_folder(self, provider, provider_fixtures): assert exc.value.code == HTTPStatus.NOT_FOUND + @pytest.mark.aiohttpretty @pytest.mark.asyncio - async def test_returns_path_obj(self, provider): + async def test_returns_path_obj(self, provider, provider_fixtures): + metadata_url = provider.build_url('files', 'get_metadata') + data = {"path": '/thisisapath'} + aiohttpretty.register_json_uri( + 'POST', + metadata_url, + data=data, + body=provider_fixtures['file_metadata'] + ) + path = await provider.validate_path('/thisisapath') assert path.is_file @@ -96,7 +115,16 @@ async def test_returns_path_obj(self, provider): assert provider.folder in path.full_path @pytest.mark.asyncio - async def test_with_folder(self, provider): + @pytest.mark.aiohttpretty + async def test_with_folder(self, provider, provider_fixtures): + data = {"path": "/Photos"} + metadata_url = provider.build_url('files', 'get_metadata') + aiohttpretty.register_json_uri( + 'POST', + metadata_url, + data=data, + body=provider_fixtures['folder_metadata'] + ) path = await provider.validate_path('/this/isa/folder/') assert path.is_dir @@ -131,7 +159,7 @@ async def test_download(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_download_not_found(self, provider, error_fixtures): - path = await provider.validate_path('/vectors.txt') + path = WaterButlerPath('/vectors.txt', prepend=provider.folder) url = provider._build_content_url('files', 'download') aiohttpretty.register_json_uri( 'POST', @@ -170,7 +198,7 @@ async def test_download_range(self, provider): @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_upload(self, provider, provider_fixtures, error_fixtures, file_stream): - path = await provider.validate_path('/phile') + path = WaterButlerPath('/phile', prepend=provider.folder) metadata_url = provider.build_url('files', 'get_metadata') data = {'path': path.full_path} aiohttpretty.register_json_uri( @@ -351,7 +379,7 @@ async def test_complete_session(self, provider, file_stream, provider_fixtures): @pytest.mark.aiohttpretty async def test_delete_file(self, provider): url = provider.build_url('files', 'delete_v2') - path = await provider.validate_path('/The past') + path = WaterButlerPath('/The past', prepend=provider.folder) data = {'path': 
path.full_path} aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK) @@ -387,10 +415,15 @@ async def test_delete_root(self, provider, provider_fixtures): status=HTTPStatus.OK ) - path2 = await provider.validate_path('/photos/flower.jpg') + path2 = WaterButlerPath('/photos/flower.jpg', prepend=provider.folder) url = provider.build_url('files', 'delete_v2') data = {'path': provider.folder.rstrip('/') + '/' + path2.path.rstrip('/')} aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK) + provider.validate_path = MockCoroutine( + side_effect=[ + WaterButlerPath('/photos/flower.jpg') + ] + ) await provider.delete(path, 1) @@ -423,7 +456,7 @@ async def test_metadata(self, provider, provider_fixtures): @pytest.mark.asyncio @pytest.mark.aiohttpretty async def test_revision_metadata(self, provider, revision_fixtures): - path = await provider.validate_path('/testfile') + path = WaterButlerPath('/testfile', prepend=provider.folder) url = provider.build_url('files', 'get_metadata') revision = 'c5bb27d11' data = {'path': 'rev:' + revision} @@ -763,7 +796,7 @@ async def test_intra_copy_replace_file( error_fixtures ): url = provider.build_url('files', 'delete_v2') - path = await provider.validate_path('/The past') + path = WaterButlerPath('/The past', prepend=provider.folder) data = {'path': path.full_path} aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK) @@ -916,7 +949,7 @@ async def test_intra_move_file(self, provider, provider_fixtures): @pytest.mark.aiohttpretty async def test_intra_move_replace_file(self, provider, provider_fixtures, error_fixtures): url = provider.build_url('files', 'delete_v2') - path = await provider.validate_path('/The past') + path = WaterButlerPath('/The past', prepend=provider.folder) data = {'path': path.full_path} aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK) @@ -960,7 +993,7 @@ async def test_intra_move_replace_file(self, provider, provider_fixtures, error_ @pytest.mark.aiohttpretty async def test_intra_move_replace_folder(self, provider, provider_fixtures, error_fixtures): url = provider.build_url('files', 'delete_v2') - path = await provider.validate_path('/newfolder/') + path = WaterButlerPath('/newfolder/', prepend=provider.folder) data = {'path': path.full_path} aiohttpretty.register_json_uri('POST', url, data=data, status=HTTPStatus.OK) diff --git a/tests/providers/figshare/test_provider.py b/tests/providers/figshare/test_provider.py index 210a58bc8..af43ab65d 100644 --- a/tests/providers/figshare/test_provider.py +++ b/tests/providers/figshare/test_provider.py @@ -596,36 +596,64 @@ async def test_project_upload(self, file_stream, project_provider, list_articles_url = project_provider.build_url(False, *root_parts, 'articles') validate_article_url = project_provider.build_url(False, *root_parts, 'articles', file_name) - aiohttpretty.register_json_uri('GET', list_articles_url, - body=root_provider_fixtures['list_project_articles'], - params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)}) - aiohttpretty.register_json_uri('GET', list_articles_url, body=[], - params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)}) + aiohttpretty.register_json_uri( + 'GET', + list_articles_url, + body=root_provider_fixtures['list_project_articles'], + params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)} + ) + aiohttpretty.register_json_uri( + 'GET', + list_articles_url, + body=[], + params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)} + ) aiohttpretty.register_uri('GET', 
validate_article_url, status=404) - path = await project_provider.validate_path('/' + file_name) path = FigsharePath('/' + file_name, _ids=('', ''), folder=False, is_public=False) article_id = str(crud_fixtures['upload_article_metadata']['id']) file_metadata = root_provider_fixtures['get_file_metadata'] create_article_url = project_provider.build_url(False, *root_parts, 'articles') create_file_url = project_provider.build_url(False, 'articles', article_id, 'files') - file_url = project_provider.build_url(False, 'articles', article_id, 'files', - str(file_metadata['id'])) + file_url = project_provider.build_url( + False, + 'articles', + article_id, + 'files', + str(file_metadata['id']) + ) get_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id) upload_url = file_metadata['upload_url'] - aiohttpretty.register_json_uri('POST', create_article_url, - body=crud_fixtures['create_article_metadata'], status=201) - aiohttpretty.register_json_uri('POST', create_file_url, - body=crud_fixtures['create_file_metadata'], status=201) - aiohttpretty.register_json_uri('GET', file_url, - body=file_metadata) - aiohttpretty.register_json_uri('GET', upload_url, - body=root_provider_fixtures['get_upload_metadata']) + aiohttpretty.register_json_uri( + 'POST', + create_article_url, + body=crud_fixtures['create_article_metadata'], + status=201 + ) + aiohttpretty.register_json_uri( + 'POST', + create_file_url, + body=crud_fixtures['create_file_metadata'], + status=201 + ) + aiohttpretty.register_json_uri( + 'GET', + file_url, + body=file_metadata + ) + aiohttpretty.register_json_uri( + 'GET', + upload_url, + body=root_provider_fixtures['get_upload_metadata'] + ) aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200) aiohttpretty.register_uri('POST', file_url, status=202) - aiohttpretty.register_json_uri('GET', get_article_url, - body=crud_fixtures['upload_article_metadata']) + aiohttpretty.register_json_uri( + 'GET', + get_article_url, + body=crud_fixtures['upload_article_metadata'] + ) # md5 hash calculation is being hacked around. 
see test class docstring result, created = await project_provider.upload(file_stream, path) @@ -636,9 +664,7 @@ async def test_project_upload(self, file_stream, project_provider, assert aiohttpretty.has_call( method='POST', uri=create_article_url, - data=json.dumps({ - 'title': 'barricade.gif', - }) + data=json.dumps({'title': 'barricade.gif'}) ) assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url)) assert aiohttpretty.has_call(method='POST', uri=create_file_url) diff --git a/tests/providers/filesystem/test_provider.py b/tests/providers/filesystem/test_provider.py index aa5fbc753..bca61456e 100644 --- a/tests/providers/filesystem/test_provider.py +++ b/tests/providers/filesystem/test_provider.py @@ -113,7 +113,7 @@ async def test_download_range_open_ended(self, provider): @pytest.mark.asyncio async def test_download_not_found(self, provider): - path = await provider.validate_path('/missing.txt') + path = WaterButlerPath('/missing.txt', prepend=provider.folder) with pytest.raises(exceptions.DownloadError): await provider.download(path) @@ -126,7 +126,7 @@ async def test_upload_create(self, provider): file_content = b'Test Upload Content' file_stream = streams.StringStream(file_content) - path = await provider.validate_path(file_path) + path = WaterButlerPath(file_path, prepend=provider.folder) metadata, created = await provider.upload(file_stream, path) assert metadata.name == file_name @@ -160,7 +160,7 @@ async def test_upload_nested_create(self, provider): file_content = b'Test New Nested Content' file_stream = streams.FileStreamReader(io.BytesIO(file_content)) - path = await provider.validate_path(file_path) + path = WaterButlerPath(file_path, prepend=provider.folder) metadata, created = await provider.upload(file_stream, path) assert metadata.name == file_name @@ -242,7 +242,7 @@ async def test_metadata_root_file(self, provider): @pytest.mark.asyncio async def test_metadata_missing(self, provider): - path = await provider.validate_path('/missing.txt') + path = WaterButlerPath('/missing.txt', prepend=provider.folder) with pytest.raises(exceptions.MetadataError): await provider.metadata(path) @@ -252,8 +252,10 @@ class TestIntra: @pytest.mark.asyncio async def test_intra_copy_file(self, provider): - src_path = await provider.validate_path('/flower.jpg') - dest_path = await provider.validate_path('/subfolder/flower.jpg') + """ + """ + src_path = WaterButlerPath('/flower.jpg', prepend=provider.folder) + dest_path = WaterButlerPath('/subfolder/flower.jpg', prepend=provider.folder) result = await provider.intra_copy(provider, src_path, dest_path) @@ -265,8 +267,8 @@ async def test_intra_copy_file(self, provider): @pytest.mark.asyncio async def test_intra_move_folder(self, provider): - src_path = await provider.validate_path('/subfolder/') - dest_path = await provider.validate_path('/other_subfolder/subfolder/') + src_path = WaterButlerPath('/subfolder/', prepend=provider.folder) + dest_path = WaterButlerPath('/other_subfolder/subfolder/', prepend=provider.folder) result = await provider.intra_move(provider, src_path, dest_path) @@ -277,8 +279,10 @@ async def test_intra_move_folder(self, provider): @pytest.mark.asyncio async def test_intra_move_file(self, provider): - src_path = await provider.validate_path('/flower.jpg') - dest_path = await provider.validate_path('/subfolder/flower.jpg') + """Test the `intra_move` method + """ + src_path = WaterButlerPath('/flower.jpg', prepend=provider.folder) + dest_path = WaterButlerPath('/subfolder/flower.jpg', prepend=provider.folder) 
result = await provider.intra_move(provider, src_path, dest_path)
diff --git a/tests/providers/github/test_provider.py b/tests/providers/github/test_provider.py
index 3dd3b21fa..65d6c177b 100644
--- a/tests/providers/github/test_provider.py
+++ b/tests/providers/github/test_provider.py
@@ -204,50 +204,6 @@ async def test_reject_multiargs(self, provider):
 
         assert exc.value.code == client.BAD_REQUEST
 
-    @pytest.mark.asyncio
-    async def test_validate_path(self, provider):
-        path = await provider.validate_path('/this/is/my/path')
-
-        assert path.is_dir is False
-        assert path.is_file is True
-        assert path.name == 'path'
-        assert isinstance(path.identifier, tuple)
-        assert path.identifier == (provider.default_branch, None)
-        assert path.parts[0].identifier == (provider.default_branch, None)
-
-    @pytest.mark.asyncio
-    async def test_validate_path_passes_branch(self, provider):
-        path = await provider.validate_path('/this/is/my/path', branch='NotMaster')
-
-        assert path.is_dir is False
-        assert path.is_file is True
-        assert path.name == 'path'
-        assert isinstance(path.identifier, tuple)
-        assert path.identifier == ('NotMaster', None)
-        assert path.parts[0].identifier == ('NotMaster', None)
-
-    @pytest.mark.asyncio
-    async def test_validate_path_passes_ref(self, provider):
-        path = await provider.validate_path('/this/is/my/path', ref='NotMaster')
-
-        assert path.is_dir is False
-        assert path.is_file is True
-        assert path.name == 'path'
-        assert isinstance(path.identifier, tuple)
-        assert path.identifier == ('NotMaster', None)
-        assert path.parts[0].identifier == ('NotMaster', None)
-
-    @pytest.mark.asyncio
-    async def test_validate_path_passes_file_sha(self, provider):
-        path = await provider.validate_path('/this/is/my/path', fileSha='Thisisasha')
-
-        assert path.is_dir is False
-        assert path.is_file is True
-        assert path.name == 'path'
-        assert isinstance(path.identifier, tuple)
-        assert path.identifier == (provider.default_branch, 'Thisisasha')
-        assert path.parts[0].identifier == (provider.default_branch, None)
-
     @pytest.mark.asyncio
     async def test_revalidate_path(self, provider):
         path = '/'
diff --git a/tests/providers/osfstorage/test_provider.py b/tests/providers/osfstorage/test_provider.py
index 054547ddd..5246a0c98 100644
--- a/tests/providers/osfstorage/test_provider.py
+++ b/tests/providers/osfstorage/test_provider.py
@@ -76,6 +76,9 @@ async def test_download_with_auth(self, provider_and_mock, download_response, do
                                                     'download', version=None, mode=None)
         aiohttpretty.register_json_uri('GET', uri, body=download_response, params=params)
 
+        inner_provider.validate_path = utils.MockCoroutine(
+            return_value=WaterButlerPath('/043be9ff919762f0dc36fff0222cd90c753ce28b39feb52112be9360c476ef88')
+        )
         await provider.download(download_path)
 
@@ -96,10 +99,18 @@ async def test_download_without_auth(self, provider_and_mock, download_response,
         provider, inner_provider = provider_and_mock
         provider.auth = {}
 
-        url, params = build_signed_url_without_auth(provider, 'GET', download_path.identifier,
-                                                    'download', version=None, mode=None)
+        url, params = build_signed_url_without_auth(
+            provider,
+            'GET',
+            download_path.identifier,
+            'download',
+            version=None,
+            mode=None
+        )
         aiohttpretty.register_json_uri('GET', url, params=params, body=download_response)
-
+        inner_provider.validate_path = utils.MockCoroutine(
+            return_value=WaterButlerPath('/043be9ff919762f0dc36fff0222cd90c753ce28b39feb52112be9360c476ef88')
+        )
         await provider.download(download_path)
 
         assert provider.make_provider.called
@@ -109,8 +120,10 @@ async def test_download_without_auth(self, provider_and_mock, download_response,
         expected_path = WaterButlerPath('/' + download_response['data']['path'])
         expected_display_name = download_response['data']['name']
 
-        inner_provider.download.assert_called_once_with(path=expected_path,
-                                                        displayName=expected_display_name)
+        inner_provider.download.assert_called_once_with(
+            path=expected_path,
+            displayName=expected_display_name
+        )
 
     @pytest.mark.asyncio
     @pytest.mark.aiohttpretty
@@ -563,10 +576,6 @@ async def test_validate_path_404s(self, provider, file_lineage, mock_time):
         with pytest.raises(exceptions.UnhandledProviderError):
             await provider.validate_path('/' + file_id)
 
-        wb_path = await provider.validate_path('/' + file_id)
-
-        assert wb_path == WaterButlerPath(file_lineage['data'][0]['path'], prepend=None)
-
     @pytest.mark.asyncio
     @pytest.mark.aiohttpretty
     async def test_revalidate_path_new(self, provider, folder_path, folder_children_metadata,
@@ -595,22 +604,6 @@ async def test_revalidate_path_existing(self, provider, folder_path, folder_chil
 
         assert revalidated_path.name == 'one'
 
-    @pytest.mark.asyncio
-    @pytest.mark.aiohttpretty
-    async def test_validate_path_nested(self, provider, file_lineage, folder_lineage, mock_time):
-        file_id = file_lineage['data'][0]['id']
-
-        url, params = build_signed_url_without_auth(provider, 'GET', file_id, 'lineage')
-        aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=file_lineage)
-
-        url, params = build_signed_url_without_auth(provider, 'GET', 'New%20Folder', 'lineage')
-        aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=folder_lineage)
-
-        wb_path = await provider.validate_path('New Folder/' + file_id)
-
-        assert len(wb_path._parts) == 3
-        assert wb_path.name == '59a9b628b7d1c903ab5a8f52'
-
 
 class TestUploads:
 
     def patch_tasks(self, monkeypatch):
@@ -630,6 +623,13 @@ async def test_upload_new(self, monkeypatch, provider_and_mock, file_stream,
         provider, inner_provider = provider_and_mock
         inner_provider.metadata = utils.MockCoroutine(return_value=utils.MockFileMetadata())
 
+        inner_provider.validate_path = utils.MockCoroutine(
+            side_effect=[
+                WaterButlerPath('/patched_path', _ids=('rootId', 'folder1')),
+                WaterButlerPath('/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', prepend=None),
+                WaterButlerPath('/patched_path', prepend=None)
+            ]
+        )
         res, created = await provider.upload(file_stream, upload_path)
 
@@ -644,8 +644,12 @@ async def test_upload_new(self, monkeypatch, provider_and_mock, file_stream,
         inner_provider.delete.assert_called_once_with(WaterButlerPath('/patched_path'))
         expected_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
         inner_provider.metadata.assert_called_once_with(expected_path)
-        inner_provider.upload.assert_called_once_with(file_stream, WaterButlerPath('/patched_path'),
-                                                      check_created=False, fetch_metadata=False)
+        inner_provider.upload.assert_called_once_with(
+            file_stream,
+            WaterButlerPath('/patched_path'),
+            check_created=False,
+            fetch_metadata=False
+        )
 
     @pytest.mark.asyncio
     @pytest.mark.aiohttpretty
@@ -657,6 +661,12 @@ async def test_upload_existing(self, monkeypatch, provider_and_mock, file_stream
         url = 'https://waterbutler.io/{}/children/'.format(upload_path.parent.identifier)
 
         inner_provider.move.return_value = (utils.MockFileMetadata(), True)
+        inner_provider.validate_path = utils.MockCoroutine(
+            side_effect=[
+                WaterButlerPath('/patched_path', _ids=('rootId', 'folder1')),
+                WaterButlerPath('/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', _ids=('rootId', 'folder1')),
+            ]
+        )
         inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=404)
         aiohttpretty.register_json_uri('POST', url, status=200, body=upload_response)
 
@@ -709,6 +719,13 @@ async def test_upload_and_tasks(self, monkeypatch, provider_and_mock, file_strea
         # mock_parity = mock.Mock()
         # mock_backup = mock.Mock()
         inner_provider.move.return_value = (utils.MockFileMetadata(), True)
+        inner_provider.validate_path = utils.MockCoroutine(
+            side_effect=[
+                WaterButlerPath('/uniquepath', _ids=('rootId', 'folder1')),
+                WaterButlerPath('/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', _ids=('rootId', 'folder1')),
+                WaterButlerPath('/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', _ids=('rootId', 'folder1'))
+            ]
+        )
         inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=404)
         aiohttpretty.register_json_uri('POST', url, status=201, body=upload_response)
 
@@ -727,20 +744,38 @@ async def test_upload_and_tasks(self, monkeypatch, provider_and_mock, file_strea
         assert res.extra['downloads'] == 0
         assert res.extra['checkout'] is None
 
-        inner_provider.upload.assert_called_once_with(file_stream, WaterButlerPath('/uniquepath'),
-                                                      check_created=False, fetch_metadata=False)
-        # complete_path = os.path.join(FILE_PATH_COMPLETE, local_complete_dir,
-        #                              file_stream.writers['sha256'].hexdigest)
-        # mock_parity.assert_called_once_with(complete_path, upload_response['version'],
-        #                                     'https://waterbutler.io/hooks/metadata/',
-        #                                     credentials['parity'], settings['parity'])
-        # mock_backup.assert_called_once_with(complete_path, upload_response['version'],
-        #                                     'https://waterbutler.io/hooks/metadata/',
-        #                                     credentials['archive'], settings['archive'])
+        inner_provider.upload.assert_called_once_with(
+            file_stream,
+            WaterButlerPath('/uniquepath'),
+            check_created=False,
+            fetch_metadata=False
+        )
+        # complete_path = os.path.join(
+        #     FILE_PATH_COMPLETE,
+        #     local_complete_dir,
+        #     file_stream.writers['sha256'].hexdigest
+        # )
+        # mock_parity.assert_called_once_with(
+        #     complete_path,
+        #     upload_response['version'],
+        #     'https://waterbutler.io/hooks/metadata/',
+        #     credentials['parity'],
+        #     settings['parity']
+        # )
+        # mock_backup.assert_called_once_with(
+        #     complete_path,
+        #     upload_response['version'],
+        #     'https://waterbutler.io/hooks/metadata/',
+        #     credentials['archive'],
+        #     settings['archive']
+        # )
         expected_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
-        inner_provider.metadata.assert_called_once_with(expected_path)
-        inner_provider.move.assert_called_once_with(inner_provider, WaterButlerPath('/uniquepath'),
-                                                    expected_path)
+        #inner_provider.metadata.assert_called_once_with(expected_path)
+        inner_provider.move.assert_called_once_with(
+            inner_provider,
+            WaterButlerPath('/uniquepath'),
+            expected_path
+        )
 
     @pytest.mark.asyncio
     @pytest.mark.aiohttpretty
@@ -754,6 +789,9 @@ async def test_upload_fails(self, monkeypatch, provider_and_mock, file_stream, u
         aiohttpretty.register_json_uri('POST', url, status=201, body=upload_response)
 
         inner_provider.metadata = utils.MockCoroutine(return_value=utils.MockFileMetadata())
+        inner_provider.validate_path = utils.MockCoroutine(
+            return_value=WaterButlerPath('/patched_path', _ids=('rootId', 'folder1'))
+        )
         inner_provider.upload.side_effect = Exception()
 
         with pytest.raises(Exception):
diff --git a/tests/providers/s3/test_provider.py b/tests/providers/s3/test_provider.py
index 0f034496a..8c4bf9579 100644
--- a/tests/providers/s3/test_provider.py
+++ b/tests/providers/s3/test_provider.py
@@ -253,33 +253,6 @@ async def test_validate_path_folder(self, provider, folder_metadata, mock_time):
 
         assert exc.value.code == client.NOT_FOUND
 
-    @pytest.mark.asyncio
-    async def test_normal_name(self, provider, mock_time):
-        path = await provider.validate_path('/this/is/a/path.txt')
-        assert path.name == 'path.txt'
-        assert path.parent.name == 'a'
-        assert path.is_file
-        assert not path.is_dir
-        assert not path.is_root
-
-    @pytest.mark.asyncio
-    async def test_folder(self, provider, mock_time):
-        path = await provider.validate_path('/this/is/a/folder/')
-        assert path.name == 'folder'
-        assert path.parent.name == 'a'
-        assert not path.is_file
-        assert path.is_dir
-        assert not path.is_root
-
-    @pytest.mark.asyncio
-    async def test_root(self, provider, mock_time):
-        path = await provider.validate_path('/this/is/a/folder/')
-        assert path.name == 'folder'
-        assert path.parent.name == 'a'
-        assert not path.is_file
-        assert path.is_dir
-        assert not path.is_root
-
 
 class TestCRUD:
 
diff --git a/waterbutler/providers/gitlab/provider.py b/waterbutler/providers/gitlab/provider.py
index 5299d333d..224edd91b 100644
--- a/waterbutler/providers/gitlab/provider.py
+++ b/waterbutler/providers/gitlab/provider.py
@@ -110,9 +110,10 @@ async def validate_path(self, path: str, **kwargs) -> GitLabPath:
             branch_name = await self._fetch_default_branch()
 
         if path == '/':
-            return GitLabPath(path, _ids=[(commit_sha, branch_name)])
+            gl_path = GitLabPath(path, _ids=[(commit_sha, branch_name)])
+        else:
+            gl_path = GitLabPath(path)
 
-        gl_path = GitLabPath(path)
         for part in gl_path.parts:
             part._id = (commit_sha, branch_name)
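
Note (illustrative, not part of the patch): the osfstorage test hunks above all rely on the same stubbing pattern -- the inner storage provider's validate_path coroutine is replaced with an awaitable mock so each test controls exactly which WaterButlerPath the outer provider sees. A minimal sketch of that pattern, assuming pytest-asyncio, a MockCoroutine helper equivalent to the one used in the hunks above, and the provider_and_mock fixture from these tests:

    # Illustrative sketch only -- not part of this patch. The import location of
    # MockCoroutine ("tests.utils") and the provider_and_mock fixture are assumed
    # to match the test suite shown in the hunks above.
    import pytest

    from tests import utils
    from waterbutler.core.path import WaterButlerPath


    @pytest.mark.asyncio
    async def test_sketch_stubbed_validate_path(provider_and_mock):
        provider, inner_provider = provider_and_mock

        # Replace the inner provider's validate_path with a mock coroutine that
        # always resolves to a canned path, so the outer provider's logic can be
        # exercised without performing any real path lookups.
        inner_provider.validate_path = utils.MockCoroutine(
            return_value=WaterButlerPath('/patched_path', _ids=('rootId', 'folder1'))
        )

        path = await inner_provider.validate_path('/anything')
        assert path.name == 'patched_path'

The side_effect=[...] variant used in the upload tests works the same way, except that each successive await returns the next path in the list.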