diff --git a/.gitignore b/.gitignore index d41b9a26..cb2d49cf 100644 --- a/.gitignore +++ b/.gitignore @@ -4,7 +4,7 @@ *.egg-info *build/ *dist/ +*htmlcov/ *.coverage -.coveragerc .cache/ tests/__pycache__/ diff --git a/README.rst b/README.rst index 625cda0e..924d0389 100644 --- a/README.rst +++ b/README.rst @@ -30,14 +30,14 @@ Features - Clean, Pythonic interface - Lightweight -- 95%+ API coverage +- 95%+ ArangoDB REST API coverage Compatibility ============= - Python versions 2.7.x, 3.4.x and 3.5.x are supported - Latest version of python-arango (3.x) supports ArangoDB 3.x only -- Older versions of python-arango (2.x) support ArangoDB 2.x only +- Older versions of python-arango support ArangoDB 1.x ~ 2.x only Installation ============ @@ -74,7 +74,7 @@ Here is simple usage example: # Initialize the client for ArangoDB client = ArangoClient( protocol='http', - host="localhost", + host='localhost', port=8529, username='root', password='', diff --git a/arango/aql.py b/arango/aql.py index 90a3bb1d..2b4d24de 100644 --- a/arango/aql.py +++ b/arango/aql.py @@ -9,10 +9,10 @@ AQLQueryExecuteError, AQLFunctionCreateError, AQLFunctionDeleteError, - AQLFunctionsListError, + AQLFunctionListError, AQLCacheClearError, AQLCacheConfigureError, - AQLCacheGetPropertiesError + AQLCachePropertiesError ) from arango.request import Request @@ -168,14 +168,14 @@ def functions(self): :returns: a mapping of AQL function names to its javascript code :rtype: dict - :raises arango.exceptions.AQLFunctionsListError: if the AQL functions + :raises arango.exceptions.AQLFunctionListError: if the AQL functions cannot be retrieved """ request = Request(method='get', endpoint='/_api/aqlfunction') def handler(res): if res.status_code not in HTTP_OK: - raise AQLFunctionsListError(res) + raise AQLFunctionListError(res) body = res.body or {} return {func['name']: func['code'] for func in map(dict, body)} @@ -259,7 +259,7 @@ def properties(self): :returns: the cache properties :rtype: dict - :raises arango.exceptions.AQLCacheGetPropertiesError: if the cache + :raises arango.exceptions.AQLCachePropertiesError: if the cache properties cannot be retrieved """ request = Request( @@ -269,7 +269,7 @@ def properties(self): def handler(res): if res.status_code not in HTTP_OK: - raise AQLCacheGetPropertiesError(res) + raise AQLCachePropertiesError(res) return {'mode': res.body['mode'], 'limit': res.body['maxResults']} return request, handler diff --git a/arango/async.py b/arango/async.py index c6b44ae4..a6622326 100644 --- a/arango/async.py +++ b/arango/async.py @@ -5,12 +5,9 @@ from arango.utils import HTTP_OK from arango.exceptions import ( AsyncExecuteError, - AsyncJobInvalidError, - AsyncJobNotDoneError, - AsyncJobNotFoundError, AsyncJobCancelError, - AsyncJobGetStatusError, - AsyncJobGetResultError, + AsyncJobStatusError, + AsyncJobResultError, AsyncJobClearError ) from arango.graph import Graph @@ -39,14 +36,15 @@ def __init__(self, connection, return_result=True): username=connection.username, password=connection.password, http_client=connection.http_client, - database=connection.database + database=connection.database, + enable_logging=connection.has_logging ) self._return_result = return_result self._aql = AQL(self) self._type = 'async' def __repr__(self): - return '' + return '' def handle_request(self, request, handler): """Handle the incoming request and response handler. 
@@ -57,11 +55,13 @@ def handle_request(self, request, handler): :type handler: callable :returns: the async job or None :rtype: arango.async.AsyncJob + :raises arango.exceptions.AsyncExecuteError: if the async request + cannot be executed """ if self._return_result: request.headers['x-arango-async'] = 'store' else: - request.headers['x-arango-async'] = True + request.headers['x-arango-async'] = 'true' res = getattr(self, request.method)(**request.kwargs) if res.status_code not in HTTP_OK: @@ -145,55 +145,36 @@ def status(self): """Return the status of the async job from the server. :returns: the status of the async job, which can be ``"pending"`` (the - job is still in the queue), ``"done"`` (the job completed or raised + job is still in the queue), ``"done"`` (the job finished or raised an exception) :rtype: str - :raises arango.exceptions.AsyncJobInvalidError: if the async job is - not valid - :raises arango.exceptions.AsyncJobNotFoundError: if the async job - cannot be found in the server - :raises arango.exceptions.AsyncJobGetStatusError: if the status of the + :raises arango.exceptions.AsyncJobStatusError: if the status of the async job cannot be retrieved from the server """ - res = self._conn.get('/_api/job/{}'.format(self._id)) + res = self._conn.get('/_api/job/{}'.format(self.id)) if res.status_code == 204: return 'pending' elif res.status_code in HTTP_OK: return 'done' - elif res.status_code == 400: - raise AsyncJobInvalidError(res) elif res.status_code == 404: - raise AsyncJobNotFoundError(res) + raise AsyncJobStatusError(res, 'Job {} missing'.format(self.id)) else: - raise AsyncJobGetStatusError(res) + raise AsyncJobStatusError(res) def result(self): """Return the result of the async job if available. :returns: the result or the exception from the async job :rtype: object - :raises arango.exceptions.AsyncJobInvalidError: if the async job is - not valid - :raises arango.exceptions.AsyncJobNotFoundError: if the async job - cannot be found in the server - :raises arango.exceptions.AsyncJobNotDoneError: if the async job is - still pending in the queue - :raises arango.exceptions.AsyncJobGetResultError: if the result of the + :raises arango.exceptions.AsyncJobResultError: if the result of the async job cannot be retrieved from the server .. note:: An async job result will automatically be cleared from the server once fetched and will *not* be available in subsequent calls. """ - _id = self._id - res = self._conn.put('/_api/job/{}'.format(_id)) - if ( - res.status_code == 404 and - res.error_code == 404 and - res.error_message == 'not found' - ): - raise AsyncJobNotFoundError(res, 'Job {} not found'.format(_id)) - elif res.body is not None: + res = self._conn.put('/_api/job/{}'.format(self._id)) + if 'X-Arango-Async-Id' in res.headers: try: result = self._handler(res) except Exception as error: @@ -201,10 +182,11 @@ def result(self): else: return result elif res.status_code == 204: - raise AsyncJobNotDoneError(res, 'Job {} pending'.format(_id)) - elif res.status_code == 400: - raise AsyncJobInvalidError(res, 'Job {} invalid'.format(_id)) - raise AsyncJobGetResultError(res, 'Failed to query job {}'.format(_id)) + raise AsyncJobResultError(res, 'Job {} not done'.format(self._id)) + elif res.status_code == 404: + raise AsyncJobResultError(res, 'Job {} missing'.format(self._id)) + else: + raise AsyncJobResultError(res) def cancel(self, ignore_missing=False): """Cancel the async job if it is still pending. 
@@ -214,55 +196,40 @@ def cancel(self, ignore_missing=False): :returns: ``True`` if the job was cancelled successfully, ``False`` if the job was not found but **ignore_missing** was set to ``True`` :rtype: bool - :raises arango.exceptions.AsyncJobInvalidError: if the async job is - not valid - :raises arango.exceptions.AsyncJobNotFoundError: if the async job - cannot be found in the server :raises arango.exceptions.AsyncJobCancelError: if the async job cannot be cancelled .. note:: - An async job cannot be cancelled once it is taken out of the queue. + An async job cannot be cancelled once it is taken out of the queue + (i.e. started, finished or cancelled). """ - _id = self._id - res = self._conn.put('/_api/job/{}/cancel'.format(_id)) + res = self._conn.put('/_api/job/{}/cancel'.format(self._id)) if res.status_code == 200: return True - elif res.status_code == 400: - raise AsyncJobInvalidError(res, 'Job {} invalid'.format(_id)) elif res.status_code == 404: if ignore_missing: return False - raise AsyncJobNotFoundError(res, 'Job {} not found'.format(_id)) - raise AsyncJobCancelError(res, 'Failed to cancel job {}'.format(_id)) + raise AsyncJobCancelError(res, 'Job {} missing'.format(self._id)) + else: + raise AsyncJobCancelError(res) def clear(self, ignore_missing=False): - """Clear the result of the job from the server if available. - - If the result is deleted successfully, boolean True is returned. If - the job was not found but ``ignore_missing`` was set, boolean False - is returned. + """Delete the result of the job from the server. :param ignore_missing: ignore missing async jobs :type ignore_missing: bool :returns: ``True`` if the result was deleted successfully, ``False`` if the job was not found but **ignore_missing** was set to ``True`` :rtype: bool - :raises arango.exceptions.AsyncJobInvalidError: if the async job is - not valid - :raises arango.exceptions.AsyncJobNotFoundError: if the async job - cannot be found in the server - :raises arango.exceptions.AsyncJobClearError: if the result of - the async job cannot be removed from the server + :raises arango.exceptions.AsyncJobClearError: if the result of the + async job cannot be delete from the server """ - _id = self._id - res = self._conn.delete('/_api/job/{}'.format(_id)) + res = self._conn.delete('/_api/job/{}'.format(self._id)) if res.status_code in HTTP_OK: return True - elif res.status_code == 400: - raise AsyncJobInvalidError(res, 'Job {} invalid'.format(_id)) elif res.status_code == 404: if ignore_missing: return False - raise AsyncJobNotFoundError(res, 'Job {} not found'.format(_id)) - raise AsyncJobClearError(res, 'Failed to clear job {}'.format(_id)) + raise AsyncJobClearError(res, 'Job {} missing'.format(self._id)) + else: + raise AsyncJobClearError(res) diff --git a/arango/batch.py b/arango/batch.py index a926cbb6..4a89d484 100644 --- a/arango/batch.py +++ b/arango/batch.py @@ -38,7 +38,8 @@ def __init__(self, connection, return_result=True, commit_on_error=False): username=connection.username, password=connection.password, http_client=connection.http_client, - database=connection.database + database=connection.database, + enable_logging=connection.has_logging ) self._id = uuid4() self._return_result = return_result @@ -50,7 +51,7 @@ def __init__(self, connection, return_result=True, commit_on_error=False): self._type = 'batch' def __repr__(self): - return ''.format(self._id) + return ''.format(self._id) def __enter__(self): return self diff --git a/arango/client.py b/arango/client.py index 2b5479af..ae71416f 100644 
--- a/arango/client.py +++ b/arango/client.py @@ -38,7 +38,7 @@ def __init__(self, port=8529, username='root', password='', - verify=True, + verify=False, http_client=None, enable_logging=True): @@ -61,15 +61,25 @@ def __init__(self, ) self._wal = WriteAheadLog(self._conn) - # Verify the server connection if verify: - res = self._conn.head('/_api/version') - if res.status_code not in HTTP_OK: - raise ServerConnectionError(res) + self.verify() def __repr__(self): return ''.format(self._host) + def verify(self): + """Verify the connection to ArangoDB server. + + :returns: ``True`` if the connection is successful + :rtype: bool + :raises arango.exceptions.ServerConnectionError: if the connection to + the ArangoDB server fails + """ + res = self._conn.head('/_api/version') + if res.status_code not in HTTP_OK: + raise ServerConnectionError(res) + return True + @property def protocol(self): """Return the internet transfer protocol. @@ -147,7 +157,7 @@ def version(self): :returns: the server version :rtype: str - :raises arango.exceptions.ServerGetVersionError: if the server version + :raises arango.exceptions.ServerVersionError: if the server version cannot be retrieved """ res = self._conn.get( @@ -155,7 +165,7 @@ def version(self): params={'details': False} ) if res.status_code not in HTTP_OK: - raise ServerGetVersionError(res) + raise ServerVersionError(res) return res.body['version'] def details(self): @@ -163,7 +173,7 @@ def details(self): :returns: the server details :rtype: dict - :raises arango.exceptions.ServerGetDetailsError: if the server details + :raises arango.exceptions.ServerDetailsError: if the server details cannot be retrieved """ res = self._conn.get( @@ -171,7 +181,7 @@ def details(self): params={'details': True} ) if res.status_code not in HTTP_OK: - raise ServerGetDetailsError(res) + raise ServerDetailsError(res) return res.body['details'] def required_db_version(self): @@ -179,12 +189,12 @@ def required_db_version(self): :returns: the required version of the target database :rtype: str - :raises arango.exceptions.ServerGetRequiredVersionError: if the + :raises arango.exceptions.ServerRequiredDBVersionError: if the required database version cannot be retrieved """ res = self._conn.get('/_admin/database/target-version') if res.status_code not in HTTP_OK: - raise ServerGetRequiredVersionError(res) + raise ServerRequiredDBVersionError(res) return res.body['version'] def statistics(self, description=False): @@ -192,7 +202,7 @@ def statistics(self, description=False): :returns: the statistics information :rtype: dict - :raises arango.exceptions.ServerGetStatisticsError: if the server + :raises arango.exceptions.ServerStatisticsError: if the server statistics cannot be retrieved """ res = self._conn.get( @@ -200,7 +210,7 @@ def statistics(self, description=False): if description else '/_admin/statistics' ) if res.status_code not in HTTP_OK: - raise ServerGetStatisticsError(res) + raise ServerStatisticsError(res) res.body.pop('code', None) res.body.pop('error', None) return res.body @@ -215,12 +225,12 @@ def role(self): in the cluster) or ``"UNDEFINED"`` (the server role is undefined, the only possible value for a single server) :rtype: str - :raises arango.exceptions.ServerGetRoleError: if the server role cannot + :raises arango.exceptions.ServerRoleError: if the server role cannot be retrieved """ res = self._conn.get('/_admin/server/role') if res.status_code not in HTTP_OK: - raise ServerGetRoleError(res) + raise ServerRoleError(res) return res.body.get('role') def time(self): 
@@ -228,12 +238,12 @@ def time(self): :returns: the server system time :rtype: datetime.datetime - :raises arango.exceptions.ServerGetTimeError: if the server time + :raises arango.exceptions.ServerTimeError: if the server time cannot be retrieved """ res = self._conn.get('/_admin/time') if res.status_code not in HTTP_OK: - raise ServerGetTimeError(res) + raise ServerTimeError(res) return datetime.fromtimestamp(res.body['time']) def endpoints(self): @@ -246,12 +256,12 @@ def endpoints(self): :returns: the list of endpoints :rtype: list - :raises arango.exceptions.ServerGetEndpointsError: if the endpoints + :raises arango.exceptions.ServerEndpointsError: if the endpoints cannot be retrieved from the server """ res = self._conn.get('/_api/endpoint') if res.status_code not in HTTP_OK: - raise ServerGetEndpointsError(res) + raise ServerEndpointsError(res) return res.body def echo(self): @@ -285,7 +295,7 @@ def sleep(self, seconds): raise ServerSleepError(res) return res.body['duration'] - def shutdown(self): + def shutdown(self): # pragma: no cover """Initiate the server shutdown sequence. :returns: whether the server was shutdown successfully @@ -301,7 +311,7 @@ def shutdown(self): raise ServerShutdownError(res) return True - def run_tests(self, tests): + def run_tests(self, tests): # pragma: no cover """Run the available unittests on the server. :param tests: list of files containing the test suites @@ -384,7 +394,7 @@ def read_log(self, params['sort'] = sort res = self._conn.get('/_admin/log') if res.status_code not in HTTP_OK: - ServerReadLogError(res) + raise ServerReadLogError(res) if 'totalAmount' in res.body: res.body['total_amount'] = res.body.pop('totalAmount') return res.body @@ -413,7 +423,7 @@ def databases(self, user_only=False): :type user_only: bool :returns: the database names :rtype: list - :raises arango.exceptions.DatabasesListError: if the database names + :raises arango.exceptions.DatabaseListError: if the database names cannot be retrieved from the server """ # Get the current user's databases @@ -422,7 +432,7 @@ def databases(self, user_only=False): if user_only else '/_api/database' ) if res.status_code not in HTTP_OK: - raise DatabasesListError(res) + raise DatabaseListError(res) return res.body['result'] def db(self, name, username=None, password=None): @@ -523,12 +533,12 @@ def users(self): :returns: the mapping of usernames to user details :rtype: list - :raises arango.exceptions.UsersListError: if the details on the users + :raises arango.exceptions.UserListError: if the details on the users cannot be retrieved from the server """ res = self._conn.get('/_api/user') if res.status_code not in HTTP_OK: - raise UsersListError(res) + raise UserListError(res) return [{ 'username': record['user'], @@ -763,3 +773,53 @@ def revoke_user_access(self, username, database): if res.status_code not in HTTP_OK: raise UserRevokeAccessError(res) return not res.body.get('error') + + ######################## + # Async Job Management # + ######################## + + def async_jobs(self, status, count=None): + """Retrieve the IDs of the asynchronous jobs with the given status. 
+ + :param status: the job status which can be ``"pending"`` or ``"done"`` + :type status: str + :param count: the maximum number of job IDs to return per call + :type count: int + :returns: the IDs the of the asynchronous jobs + :rtype: + :raises arango.exceptions.AsyncJobListError: if the list of async job + IDs cannot be retrieved from the server + """ + res = self._conn.get( + '/_api/job/{}'.format(status), + params={} if count is None else {'count': count} + ) + if res.status_code not in HTTP_OK: + raise AsyncJobListError(res) + return res.body + + def clear_async_jobs(self, threshold=None): + """Delete asynchronous job results from the server. + + :param threshold: if specified, only the job results created before + the threshold (a unix timestamp) are deleted, otherwise all job + results are deleted + :type threshold: int + :returns: whether the deletion of results was successful + :rtype: bool + :raises arango.exceptions.AsyncJobClearError: if the async job results + cannot be deleted from the server + + .. note:: + Async jobs that are currently queued or running are not stopped. + """ + if threshold is None: + res = self._conn.delete('/_api/job/all') + else: + res = self._conn.delete( + '/_api/job/expired', + params={'stamp': threshold} + ) + if res.status_code not in HTTP_OK: + raise AsyncJobClearError(res) + return True diff --git a/arango/cluster.py b/arango/cluster.py new file mode 100644 index 00000000..39106e4c --- /dev/null +++ b/arango/cluster.py @@ -0,0 +1,120 @@ +from __future__ import absolute_import, unicode_literals + +from arango.aql import AQL +from arango.collections import Collection +from arango.connection import Connection +from arango.exceptions import ClusterTestError +from arango.graph import Graph +from arango.utils import HTTP_OK + + +class ClusterTest(Connection): + """ArangoDB cluster round-trip test for sharding. + + :param connection: ArangoDB database connection + :type connection: arango.connection.Connection + :param shard_id: the ID of the shard to which the request is sent + :type shard_id: str + :param transaction_id: the transaction ID for the request + :type transaction_id: str + :param timeout: the timeout in seconds for the cluster operation, where + an error is returned if the response does not arrive within the given + limit (default: 24 hrs) + :type timeout: int + :param sync: if set to ``True``, the test uses synchronous mode, otherwise + asynchronous mode is used (this is mainly for debugging purposes) + :param sync: bool + """ + + def __init__(self, + connection, + shard_id, + transaction_id=None, + timeout=None, + sync=None): + super(ClusterTest, self).__init__( + protocol=connection.protocol, + host=connection.host, + port=connection.port, + username=connection.username, + password=connection.password, + http_client=connection.http_client, + database=connection.database, + enable_logging=connection.has_logging + ) + self._shard_id = shard_id + self._trans_id = transaction_id + self._timeout = timeout + self._sync = sync + self._aql = AQL(self) + self._type = 'cluster' + + def __repr__(self): + return '' + + def handle_request(self, request, handler): + """Handle the incoming request and response handler. 
+ + :param request: the API request to be placed in the server-side queue + :type request: arango.request.Request + :param handler: the response handler + :type handler: callable + :returns: the test results + :rtype: dict + :raises arango.exceptions.ClusterTestError: if the cluster round-trip + test cannot be executed + """ + request.headers['X-Shard-ID'] = str(self._shard_id) + if self._trans_id is not None: + request.headers['X-Client-Transaction-ID'] = str(self._trans_id) + if self._timeout is not None: + request.headers['X-Timeout'] = str(self._timeout) + if self._sync is True: + request.headers['X-Synchronous-Mode'] = 'true' + + request.endpoint = '/_admin/cluster-test' + request.endpoint + '11' + res = getattr(self, request.method)(**request.kwargs) + if res.status_code not in HTTP_OK: + raise ClusterTestError(res) + return res.body # pragma: no cover + + @property + def aql(self): + """Return the AQL object tailored for asynchronous execution. + + API requests via the returned query object are placed in a server-side + in-memory task queue and executed asynchronously in a fire-and-forget + style. + + :returns: ArangoDB query object + :rtype: arango.query.AQL + """ + return self._aql + + def collection(self, name): + """Return a collection object tailored for asynchronous execution. + + API requests via the returned collection object are placed in a + server-side in-memory task queue and executed asynchronously in + a fire-and-forget style. + + :param name: the name of the collection + :type name: str + :returns: the collection object + :rtype: arango.collections.Collection + """ + return Collection(self, name) + + def graph(self, name): + """Return a graph object tailored for asynchronous execution. + + API requests via the returned graph object are placed in a server-side + in-memory task queue and executed asynchronously in a fire-and-forget + style. 
+ + :param name: the name of the graph + :type name: str + :returns: the graph object + :rtype: arango.graph.Graph + """ + return Graph(self, name) diff --git a/arango/collections/base.py b/arango/collections/base.py index 27f7f755..778a03aa 100644 --- a/arango/collections/base.py +++ b/arango/collections/base.py @@ -56,12 +56,12 @@ def __len__(self): :returns: the number of documents :rtype: int - :raises arango.exceptions.CollectionGetCountError: if the document + :raises arango.exceptions.DocumentCountError: if the document count cannot be retrieved """ res = self._conn.get('/_api/collection/{}/count'.format(self._name)) if res.status_code not in HTTP_OK: - raise CollectionGetCountError(res) + raise DocumentCountError(res) return res.body['count'] def __getitem__(self, key): @@ -75,7 +75,7 @@ def __getitem__(self, key): be fetched from the collection """ res = self._conn.get('/_api/document/{}/{}'.format(self._name, key)) - if res.status_code == 404: + if res.status_code == 404 and res.error_code == 1202: return None elif res.status_code not in HTTP_OK: raise DocumentGetError(res) @@ -88,15 +88,15 @@ def __contains__(self, key): :type key: dict | str :returns: whether the document exists :rtype: bool - :raises arango.exceptions.CollectionContainsError: if the check cannot + :raises arango.exceptions.DocumentInError: if the check cannot be executed """ - res = self._conn.head('/_api/document/{}/{}'.format(self._name, key)) - if res.status_code in HTTP_OK: - return True - elif res.status_code == 404: + res = self._conn.get('/_api/document/{}/{}'.format(self._name, key)) + if res.status_code == 404 and res.error_code == 1202: return False - raise CollectionContainsError(res) + elif res.status_code in HTTP_OK: + return True + raise DocumentInError(res) def _status(self, code): """Return the collection status text. 
@@ -161,7 +161,7 @@ def statistics(self): :returns: the collection statistics :rtype: dict - :raises arango.exceptions.CollectionGetStatisticsError: if the + :raises arango.exceptions.CollectionStatisticsError: if the collection statistics cannot be retrieved """ request = Request( @@ -171,7 +171,7 @@ def statistics(self): def handler(res): if res.status_code not in HTTP_OK: - raise CollectionGetStatisticsError(res) + raise CollectionStatisticsError(res) stats = res.body['figures'] stats['compaction_status'] = stats.pop('compactionStatus', None) stats['document_refs'] = stats.pop('documentReferences', None) @@ -190,7 +190,7 @@ def revision(self): :returns: the collection revision :rtype: str - :raises arango.exceptions.CollectionGetRevisionError: if the + :raises arango.exceptions.CollectionRevisionError: if the collection revision cannot be retrieved """ request = Request( @@ -200,7 +200,7 @@ def revision(self): def handler(res): if res.status_code not in HTTP_OK: - raise CollectionGetRevisionError(res) + raise CollectionRevisionError(res) return res.body['revision'] return request, handler @@ -211,7 +211,7 @@ def properties(self): :returns: the collection properties :rtype: dict - :raises arango.exceptions.CollectionGetPropertiesError: if the + :raises arango.exceptions.CollectionPropertiesError: if the collection properties cannot be retrieved """ request = Request( @@ -221,7 +221,7 @@ def properties(self): def handler(res): if res.status_code not in HTTP_OK: - raise CollectionGetPropertiesError(res) + raise CollectionPropertiesError(res) result = { 'id': res.body['id'], 'name': res.body['name'], @@ -285,11 +285,9 @@ def handler(res): 'journal_size': res.body['journalSize'], 'keygen': res.body['keyOptions']['type'], 'user_keys': res.body['keyOptions']['allowUserKeys'], + 'key_increment': res.body['keyOptions'].get('increment'), + 'key_offset': res.body['keyOptions'].get('offset') } - if 'increment' in res.body['keyOptions']: - result['key_increment'] = res.body['keyOptions']['increment'] - if 'offset' in res.body['keyOptions']: - result['key_offset'] = res.body['keyOptions']['offset'] return result return request, handler @@ -356,7 +354,7 @@ def rotate(self): def handler(res): if res.status_code not in HTTP_OK: raise CollectionRotateJournalError(res) - return res.body['result'] + return res.body['result'] # pragma: no cover return request, handler @@ -372,7 +370,7 @@ def checksum(self, with_rev=False, with_data=False): :type with_data: bool :returns: the collection checksum :rtype: int - :raises arango.exceptions.CollectionGetChecksumError: if the + :raises arango.exceptions.CollectionChecksumError: if the collection checksum cannot be retrieved """ request = Request( @@ -383,7 +381,7 @@ def checksum(self, with_rev=False, with_data=False): def handler(res): if res.status_code not in HTTP_OK: - raise CollectionGetChecksumError(res) + raise CollectionChecksumError(res) return int(res.body['checksum']) return request, handler @@ -422,7 +420,7 @@ def count(self): :returns: the number of documents :rtype: int - :raises arango.exceptions.CollectionGetCountError: if the document + :raises arango.exceptions.DocumentCountError: if the document count cannot be retrieved """ request = Request( @@ -432,7 +430,7 @@ def count(self): def handler(res): if res.status_code not in HTTP_OK: - raise CollectionGetCountError(res) + raise DocumentCountError(res) return res.body['count'] return request, handler @@ -455,7 +453,7 @@ def has(self, key, rev=None, match_rev=True): :rtype: bool :raises 
arango.exceptions.DocumentRevisionError: if the given revision does not match the revision of the retrieved document - :raises arango.exceptions.CollectionContainsError: if the check cannot + :raises arango.exceptions.DocumentInError: if the check cannot be executed """ request = Request( @@ -470,11 +468,11 @@ def has(self, key, rev=None, match_rev=True): def handler(res): if res.status_code in {304, 412}: raise DocumentRevisionError(res) - elif res.status_code == 404: + elif res.status_code == 404 and res.error_code == 1202: return False elif res.status_code in HTTP_OK: return True - raise CollectionContainsError(res) + raise DocumentInError(res) return request, handler @@ -507,7 +505,7 @@ def all(self, :param ttl: time-to-live for the cursor on the server :type ttl: int :param filter_fields: list of document fields to filter by - :type filter_fields: dict + :type filter_fields: list :param filter_type: ``"include"`` (default) or ``"exclude"`` :type filter_type: str :returns: the document cursor @@ -520,9 +518,9 @@ def all(self, time of the retrieval are *not* included by the server cursor """ data = {'count': count} - if flush is not None: + if flush is not None: # pragma: no cover data['flush'] = flush - if flush_wait is not None: + if flush_wait is not None: # pragma: no cover data['flushWait'] = flush_wait if batch_size is not None: data['batchSize'] = batch_size @@ -838,7 +836,7 @@ def find_in_box(self, if limit is not None: data['limit'] = limit if geo_field is not None: - data['geo'] = geo_field + data['geo'] = '/'.join([self._name, geo_field]) request = Request( method='put', @@ -900,7 +898,7 @@ def indexes(self): :returns: the collection indexes :rtype: [dict] - :raises arango.exceptions.IndexesListError: if the list of indexes + :raises arango.exceptions.IndexListError: if the list of indexes cannot be retrieved """ request = Request( @@ -911,23 +909,19 @@ def indexes(self): def handler(res): if res.status_code not in HTTP_OK: - raise IndexesListError(res) + raise IndexListError(res) indexes = [] for index in res.body['indexes']: index['id'] = index['id'].split('/', 1)[1] if 'minLength' in index: index['min_length'] = index.pop('minLength') - if 'byteSize' in index: - index['byte_size'] = index.pop('byteSize') if 'geoJson' in index: index['geo_json'] = index.pop('geoJson') if 'ignoreNull' in index: index['ignore_none'] = index.pop('ignoreNull') if 'selectivityEstimate' in index: index['selectivity'] = index.pop('selectivityEstimate') - if 'isNewlyCreated' in index: - index['new'] = index.pop('isNewlyCreated') indexes.append(index) return indexes @@ -951,8 +945,6 @@ def handler(res): details.pop('code', None) if 'minLength' in details: details['min_length'] = details.pop('minLength') - if 'byteSize' in details: - details['byte_size'] = details.pop('byteSize') if 'geoJson' in details: details['geo_json'] = details.pop('geoJson') if 'ignoreNull' in details: @@ -1012,7 +1004,7 @@ def add_skiplist_index(self, fields, unique=None, sparse=None): return self._add_index(data) @api_method - def add_geo_index(self, fields, ordered=None, unique=None): + def add_geo_index(self, fields, ordered=None): """Create a geo-spatial index in the collection. 
:param fields: if given a single field, the index is created using its @@ -1022,8 +1014,6 @@ def add_geo_index(self, fields, ordered=None, unique=None): :type fields: list :param ordered: whether the order is longitude -> latitude :type ordered: bool - :param unique: whether the index is unique - :type unique: bool :returns: the details on the new index :rtype: dict :raises arango.exceptions.IndexCreateError: if the geo-spatial index @@ -1032,8 +1022,6 @@ def add_geo_index(self, fields, ordered=None, unique=None): data = {'type': 'geo', 'fields': fields} if ordered is not None: data['geoJson'] = ordered - if unique is not None: - data['unique'] = unique return self._add_index(data) @api_method @@ -1093,13 +1081,15 @@ def add_persistent_index(self, fields, unique=None, sparse=None): return self._add_index(data) @api_method - def delete_index(self, index_id): + def delete_index(self, index_id, ignore_missing=False): """Delete an index from the collection. :param index_id: the ID of the index to delete :type index_id: str + :param ignore_missing: ignore missing indexes + :type ignore_missing: bool :returns: whether the index was deleted successfully - :rtype: dict + :rtype: bool :raises arango.exceptions.IndexDeleteError: if the specified index cannot be deleted from the collection """ @@ -1109,6 +1099,10 @@ def delete_index(self, index_id): ) def handler(res): + if res.status_code == 404 and res.error_code == 1212: + if ignore_missing: + return False + raise IndexDeleteError(res) if res.status_code not in HTTP_OK: raise IndexDeleteError(res) return not res.body['error'] diff --git a/arango/collections/edge.py b/arango/collections/edge.py index 32ff0c00..3df63dc9 100644 --- a/arango/collections/edge.py +++ b/arango/collections/edge.py @@ -72,7 +72,7 @@ def get(self, key, rev=None): def handler(res): if res.status_code == 412: raise DocumentRevisionError(res) - elif res.status_code == 404: + elif res.status_code == 404 and res.error_code == 1202: return None elif res.status_code not in HTTP_OK: raise DocumentGetError(res) @@ -258,7 +258,7 @@ def delete(self, document, ignore_missing=False, sync=None): def handler(res): if res.status_code == 412: raise DocumentRevisionError(res) - elif res.status_code == 404: + elif res.status_code == 404 and res.error_code == 1202: if ignore_missing: return False raise DocumentDeleteError(res) diff --git a/arango/collections/standard.py b/arango/collections/standard.py index 8d60cc01..821e0c2b 100644 --- a/arango/collections/standard.py +++ b/arango/collections/standard.py @@ -64,7 +64,7 @@ def get(self, key, rev=None, match_rev=True): def handler(res): if res.status_code in {304, 412}: raise DocumentRevisionError(res) - elif res.status_code == 404: + elif res.status_code == 404 and res.error_code == 1202: return None elif res.status_code in HTTP_OK: return res.body diff --git a/arango/collections/vertex.py b/arango/collections/vertex.py index 77c1c20b..73fd3e37 100644 --- a/arango/collections/vertex.py +++ b/arango/collections/vertex.py @@ -74,7 +74,7 @@ def get(self, key, rev=None): def handler(res): if res.status_code == 412: raise DocumentRevisionError(res) - elif res.status_code == 404: + elif res.status_code == 404 and res.error_code == 1202: return None elif res.status_code not in HTTP_OK: raise DocumentGetError(res) @@ -221,7 +221,7 @@ def handler(res): return request, handler @api_method - def delete(self, document, ignore_missing=True, sync=None): + def delete(self, document, ignore_missing=False, sync=None): """Delete a document by its key from the 
vertex collection. The ``"_key"`` field must be present in **document**. If the ``"_rev"`` @@ -262,7 +262,7 @@ def delete(self, document, ignore_missing=True, sync=None): def handler(res): if res.status_code == 412: raise DocumentRevisionError(res) - elif res.status_code == 404: + elif res.status_code == 404 and res.error_code == 1202: if ignore_missing: return False raise DocumentDeleteError(res) diff --git a/arango/cursor.py b/arango/cursor.py index 1ca9d986..5ae14648 100644 --- a/arango/cursor.py +++ b/arango/cursor.py @@ -35,13 +35,10 @@ def __enter__(self): return self def __exit__(self, *_): - self.close() + self.close(ignore_missing=True) def __repr__(self): - cursor_id = self.id - if cursor_id is None: - return '' - return ''.format(cursor_id) + return ''.format(self.id) @property def id(self): @@ -102,7 +99,6 @@ def statistics(self): stats['scanned_index'] = stats.pop('scannedIndex', None) stats['execution_time'] = stats.pop('executionTime', None) return stats - return None def warnings(self): """Return any warnings (e.g. from the query execution). @@ -112,7 +108,6 @@ def warnings(self): """ if 'extra' in self._data and 'warnings' in self._data['extra']: return self._data['extra']['warnings'] - return None def next(self): """Read the next result from the cursor. @@ -127,7 +122,6 @@ def next(self): raise CursorNextError(res) self._data = res.body elif not self.batch() and not self.has_more(): - self.close() raise StopIteration return self.batch().pop(0) @@ -142,7 +136,7 @@ def close(self, ignore_missing=True): """ if not self.id: return False - res = self._conn.delete("/api/cursor/{}".format(self.id)) + res = self._conn.delete("/_api/cursor/{}".format(self.id)) if res.status_code not in HTTP_OK: if res.status_code == 404 and ignore_missing: return False diff --git a/arango/database.py b/arango/database.py index fda3ad00..901215e8 100644 --- a/arango/database.py +++ b/arango/database.py @@ -2,6 +2,7 @@ from arango.async import AsyncExecution from arango.batch import BatchExecution +from arango.cluster import ClusterTest from arango.collections import Collection from arango.utils import HTTP_OK from arango.exceptions import * @@ -28,6 +29,15 @@ def __repr__(self): def __getitem__(self, name): return self.collection(name) + @property + def connection(self): + """Return the connection object. + + :return: the database connection object + :rtype: arango.connection.Connection + """ + return self._conn + @property def name(self): """Return the name of the database. @@ -107,17 +117,41 @@ def transaction(self, commit_on_error=commit_on_error ) + def cluster(self, shard_id, transaction_id=None, timeout=None, sync=None): + """Return the cluster round-trip test object. + + :param shard_id: the ID of the shard to which the request is sent + :type shard_id: str + :param transaction_id: the transaction ID for the request + :type transaction_id: str + :param timeout: the timeout in seconds for the cluster operation, where + an error is returned if the response does not arrive within the + given limit (default: 24 hrs) + :type timeout: int + :param sync: if set to ``True``, the test uses synchronous mode, + otherwise asynchronous mode is used (this is mainly for debugging + purposes) + :param sync: bool + """ + return ClusterTest( + connection=self._conn, + shard_id=shard_id, + transaction_id=transaction_id, + timeout=timeout, + sync=sync + ) + def properties(self): """Return the database properties. 
:returns: the database properties :rtype: dict - :raises arango.exceptions.DatabaseGetPropertiesError: if the properties + :raises arango.exceptions.DatabasePropertiesError: if the properties of the database cannot be retrieved """ res = self._conn.get('/_api/database/current') if res.status_code not in HTTP_OK: - raise DatabaseGetPropertiesError(res) + raise DatabasePropertiesError(res) result = res.body['result'] result['system'] = result.pop('isSystem') return result @@ -130,13 +164,13 @@ def collections(self): """Return the collections in the database. :returns: the details of the collections in the database - :rtype: list - :raises arango.exceptions.CollectionsListError: if the list of + :rtype: [dict] + :raises arango.exceptions.CollectionListError: if the list of collections cannot be retrieved """ res = self._conn.get('/_api/collection') if res.status_code not in HTTP_OK: - raise CollectionsListError(res) + raise CollectionListError(res) return [{ 'id': col['id'], 'name': col['name'], @@ -268,12 +302,12 @@ def graphs(self): :returns: the graphs in the database :rtype: dict - :raises arango.exceptions.GraphsListError: if the list of graphs + :raises arango.exceptions.GraphListError: if the list of graphs cannot be retrieved """ res = self._conn.get('/_api/gharial') if res.status_code not in HTTP_OK: - raise GraphsListError(res) + raise GraphListError(res) return [ { 'name': graph['_key'], @@ -299,9 +333,19 @@ def create_graph(self, orphan_collections=None): """Create a new graph in the database. + An edge definition should look like this: + + .. code-block:: python + + { + 'name': 'edge_collection_name', + 'from_collections': ['from_vertex_collection_name'], + 'to_collections': ['to_vertex_collection_name'] + } + :param name: name of the new graph :type name: str - :param edge_definitions: definitions for edges + :param edge_definitions: list of edge definitions :type edge_definitions: list :param orphan_collections: names of additional vertex collections :type orphan_collections: list @@ -312,7 +356,11 @@ def create_graph(self, """ data = {'name': name} if edge_definitions is not None: - data['edgeDefinitions'] = edge_definitions + data['edgeDefinitions'] = [{ + 'collection': definition['name'], + 'from': definition['from_collections'], + 'to': definition['to_collections'] + } for definition in edge_definitions] if orphan_collections is not None: data['orphanCollections'] = orphan_collections @@ -348,12 +396,12 @@ def tasks(self): :returns: the server tasks that are currently active :rtype: [dict] - :raises arango.exceptions.TasksListError: if the list of active server + :raises arango.exceptions.TaskListError: if the list of active server tasks cannot be retrieved from the server """ res = self._conn.get('/_api/tasks') if res.status_code not in HTTP_OK: - raise TasksListError(res) + raise TaskListError(res) return res.body def task(self, task_id): diff --git a/arango/exceptions.py b/arango/exceptions.py index 2ce9dc54..d1958ca0 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -21,7 +21,7 @@ def __init__(self, data, message=None): error_message = data.error_message elif data.status_text is not None: error_message = data.status_text - else: + else: # pragma: no cover error_message = "request failed" # Get the ArangoDB error number if provided @@ -65,19 +65,19 @@ class ServerConnectionError(ArangoError): """Failed to connect to the ArangoDB instance.""" -class ServerGetEndpointsError(ArangoError): +class ServerEndpointsError(ArangoError): """Failed to retrieve the 
ArangoDB server endpoints.""" -class ServerGetVersionError(ArangoError): +class ServerVersionError(ArangoError): """Failed to retrieve the ArangoDB server version.""" -class ServerGetDetailsError(ArangoError): +class ServerDetailsError(ArangoError): """Failed to retrieve the ArangoDB server details.""" -class ServerGetTimeError(ArangoError): +class ServerTimeError(ArangoError): """Failed to return the current ArangoDB system time.""" @@ -101,7 +101,7 @@ class ServerExecuteError(ArangoError): """Failed to execute a the given Javascript program on the server.""" -class ServerGetRequiredVersionError(ArangoError): +class ServerRequiredDBVersionError(ArangoError): """Failed to retrieve the required database version.""" @@ -113,11 +113,11 @@ class ServerReloadRoutingError(ArangoError): """Failed to reload the routing information.""" -class ServerGetStatisticsError(ArangoError): +class ServerStatisticsError(ArangoError): """Failed to retrieve the server statistics.""" -class ServerGetRoleError(ArangoError): +class ServerRoleError(ArangoError): """Failed to retrieve the role of the server in a cluster.""" @@ -126,11 +126,7 @@ class ServerGetRoleError(ArangoError): ############################## -class WALFlushError(ArangoError): - """Failed to flush the write-ahead log.""" - - -class WALGetPropertiesError(ArangoError): +class WALPropertiesError(ArangoError): """Failed to retrieve the write-ahead log.""" @@ -138,12 +134,20 @@ class WALConfigureError(ArangoError): """Failed to configure the write-ahead log.""" +class WALTransactionListError(ArangoError): + """Failed to retrieve the list of running transactions.""" + + +class WALFlushError(ArangoError): + """Failed to flush the write-ahead log.""" + + ################### # Task Exceptions # ################### -class TasksListError(ArangoError): +class TaskListError(ArangoError): """Failed to list the active server tasks.""" @@ -164,11 +168,11 @@ class TaskDeleteError(ArangoError): ####################### -class DatabasesListError(ArangoError): +class DatabaseListError(ArangoError): """Failed to retrieve the list of databases.""" -class DatabaseGetPropertiesError(ArangoError): +class DatabasePropertiesError(ArangoError): """Failed to retrieve the database options.""" @@ -185,7 +189,7 @@ class DatabaseDeleteError(ArangoError): ################### -class UsersListError(ArangoError): +class UserListError(ArangoError): """Failed to retrieve the users.""" @@ -222,19 +226,11 @@ class UserRevokeAccessError(ArangoError): ######################### -class CollectionsListError(ArangoError): +class CollectionListError(ArangoError): """Failed to retrieve the list of collections.""" -class CollectionGetCountError(ArangoError): - """Failed to retrieve the count of the documents in the collections.""" - - -class CollectionContainsError(ArangoError): - """Failed to check whether a collection contains a document.""" - - -class CollectionGetPropertiesError(ArangoError): +class CollectionPropertiesError(ArangoError): """Failed to retrieve the collection properties.""" @@ -242,15 +238,15 @@ class CollectionConfigureError(ArangoError): """Failed to configure the collection properties.""" -class CollectionGetStatisticsError(ArangoError): +class CollectionStatisticsError(ArangoError): """Failed to retrieve the collection statistics.""" -class CollectionGetRevisionError(ArangoError): +class CollectionRevisionError(ArangoError): """Failed to retrieve the collection revision.""" -class CollectionGetChecksumError(ArangoError): +class CollectionChecksumError(ArangoError): 
"""Failed to retrieve the collection checksum.""" @@ -291,6 +287,14 @@ class CollectionBadStatusError(ArangoError): ####################### +class DocumentCountError(ArangoError): + """Failed to retrieve the count of the documents in the collections.""" + + +class DocumentInError(ArangoError): + """Failed to check whether a collection contains a document.""" + + class DocumentGetError(ArangoError): """Failed to retrieve the document.""" @@ -320,7 +324,7 @@ class DocumentRevisionError(ArangoError): #################### -class IndexesListError(ArangoError): +class IndexListError(ArangoError): """Failed to retrieve the list of indexes in the collection.""" @@ -332,9 +336,9 @@ class IndexDeleteError(ArangoError): """Failed to delete the index from the collection.""" -#################### +################## # AQL Exceptions # -#################### +################## class AQLQueryExplainError(ArangoError): @@ -353,7 +357,7 @@ class AQLCacheClearError(ArangoError): """Failed to clear the AQL query cache.""" -class AQLCacheGetPropertiesError(ArangoError): +class AQLCachePropertiesError(ArangoError): """Failed to retrieve the AQL query cache properties.""" @@ -361,7 +365,7 @@ class AQLCacheConfigureError(ArangoError): """Failed to configure the AQL query cache properties.""" -class AQLFunctionsListError(ArangoError): +class AQLFunctionListError(ArangoError): """Failed to retrieve the list of AQL user functions.""" @@ -395,10 +399,6 @@ class TransactionError(ArangoError): """Failed to execute a transaction.""" -class TransactionsListError(ArangoError): - """Failed to retrieve the list of running transactions.""" - - #################### # Batch Exceptions # #################### @@ -417,27 +417,19 @@ class AsyncExecuteError(ArangoError): """Failed to execute the asynchronous request.""" -class AsyncJobInvalidError(ArangoError): - """Failed to retrieve the asynchronous job ID.""" - - -class AsyncJobNotDoneError(ArangoError): - """The asynchronous job is still pending in the queue.""" - - -class AsyncJobNotFoundError(ArangoError): - """Failed to find the asynchronous job (already deleted/fetched)""" +class AsyncJobListError(ArangoError): + """Failed to list the IDs of the asynchronous jobs.""" class AsyncJobCancelError(ArangoError): """Failed to cancel the asynchronous job.""" -class AsyncJobGetStatusError(ArangoError): +class AsyncJobStatusError(ArangoError): """Failed to retrieve the asynchronous job result from the server.""" -class AsyncJobGetResultError(ArangoError): +class AsyncJobResultError(ArangoError): """Failed to pop the asynchronous job result from the server.""" @@ -445,12 +437,21 @@ class AsyncJobClearError(ArangoError): """Failed to delete the asynchronous job result from the server.""" +########################### +# Cluster Test Exceptions # +########################### + + +class ClusterTestError(ArangoError): + """Failed to execute the cluster round-trip for sharding.""" + + #################### # Graph Exceptions # #################### -class GraphsListError(ArangoError): +class GraphListError(ArangoError): """Failed to retrieve the list of graphs.""" @@ -466,7 +467,7 @@ class GraphDeleteError(ArangoError): """Failed to delete the graph.""" -class GraphGetPropertiesError(ArangoError): +class GraphPropertiesError(ArangoError): """Failed to retrieve the graph properties.""" @@ -474,11 +475,11 @@ class GraphTraverseError(ArangoError): """Failed to execute the graph traversal.""" -class OrphanCollectionsListError(ArangoError): +class OrphanCollectionListError(ArangoError): """Failed 
to retrieve the list of orphaned vertex collections.""" -class VertexCollectionsListError(ArangoError): +class VertexCollectionListError(ArangoError): """Failed to retrieve the list of vertex collections.""" @@ -490,7 +491,7 @@ class VertexCollectionDeleteError(ArangoError): """Failed to delete the vertex collection.""" -class EdgeDefinitionsListError(ArangoError): +class EdgeDefinitionListError(ArangoError): """Failed to retrieve the list of edge definitions.""" diff --git a/arango/graph.py b/arango/graph.py index 31a1fc18..79a822b2 100644 --- a/arango/graph.py +++ b/arango/graph.py @@ -63,7 +63,7 @@ def properties(self): :returns: the graph properties :rtype: dict - :raises arango.exceptions.GraphGetPropertiesError: if the properties + :raises arango.exceptions.GraphPropertiesError: if the properties of the graph cannot be retrieved """ request = Request( @@ -73,7 +73,7 @@ def properties(self): def handler(res): if res.status_code not in HTTP_OK: - raise GraphGetPropertiesError(res) + raise GraphPropertiesError(res) graph = res.body['graph'] return { 'id': graph['_id'], @@ -92,7 +92,7 @@ def orphan_collections(self): :returns: the names of the orphan vertex collections :rtype: list - :raises arango.exceptions.OrphanCollectionsListError: if the list of + :raises arango.exceptions.OrphanCollectionListError: if the list of orphan vertex collections cannot be retrieved """ request = Request( @@ -102,7 +102,7 @@ def orphan_collections(self): def handler(res): if res.status_code not in HTTP_OK: - raise OrphanCollectionsListError(res) + raise OrphanCollectionListError(res) return res.body['graph']['orphanCollections'] return request, handler @@ -113,7 +113,7 @@ def vertex_collections(self): :returns: the names of the vertex collections :rtype: list - :raises arango.exceptions.VertexCollectionsListError: if the list of + :raises arango.exceptions.VertexCollectionListError: if the list of vertex collections cannot be retrieved """ request = Request( @@ -123,7 +123,7 @@ def vertex_collections(self): def handler(res): if res.status_code not in HTTP_OK: - raise VertexCollectionsListError(res) + raise VertexCollectionListError(res) return res.body['collections'] return request, handler @@ -188,7 +188,7 @@ def edge_definitions(self): :returns: the edge definitions of the graph :rtype: list - :raises arango.exceptions.EdgeDefinitionsListError: if the list of + :raises arango.exceptions.EdgeDefinitionListError: if the list of edge definitions cannot be retrieved """ request = Request( @@ -198,7 +198,7 @@ def edge_definitions(self): def handler(res): if res.status_code not in HTTP_OK: - raise EdgeDefinitionsListError(res) + raise EdgeDefinitionListError(res) return [ { 'name': edge_definition['collection'], @@ -379,13 +379,10 @@ def traverse(self, attributes edge and vertex :type expander_func: str :returns: the visited edges and vertices - :rtype: list + :rtype: dict :raises arango.exceptions.GraphTraverseError: if the graph traversal cannot be executed """ - if expander_func is None and direction is None: - direction = 'any' - if strategy is not None: if strategy.lower() == 'dfs': strategy = 'depthfirst' diff --git a/arango/http_clients/base.py b/arango/http_clients/base.py index 6327c5ee..17396997 100644 --- a/arango/http_clients/base.py +++ b/arango/http_clients/base.py @@ -1,7 +1,7 @@ from abc import ABCMeta, abstractmethod -class BaseHTTPClient(object): +class BaseHTTPClient(object): # pragma: no cover """Base class for ArangoDB clients. 
The methods must return an instance of :class:`arango.response.Response`. diff --git a/arango/http_clients/default.py b/arango/http_clients/default.py index 6f396e18..4606bd48 100644 --- a/arango/http_clients/default.py +++ b/arango/http_clients/default.py @@ -19,11 +19,6 @@ def __init__(self, use_session=True): else: self._session = requests - def close_session(self): - """Close the HTTP session.""" - if isinstance(self._session, requests.Session): - self._session.close() - def head(self, url, params=None, headers=None, auth=None): """Execute an HTTP **HEAD** method. diff --git a/arango/request.py b/arango/request.py index afb4e4c3..38752a6c 100644 --- a/arango/request.py +++ b/arango/request.py @@ -8,7 +8,8 @@ class Request(object): """ArangoDB API request object. - This class is meant to be used internally only. + .. note:: + This class is meant to be used internally only. """ __slots__ = ( @@ -34,11 +35,6 @@ def __init__(self, self.data = data self.command = command - def __repr__(self): - return "".format( - self.method.upper(), self.endpoint - ) - @property def kwargs(self): return { diff --git a/arango/response.py b/arango/response.py index 64a0443c..082c46ab 100644 --- a/arango/response.py +++ b/arango/response.py @@ -17,6 +17,9 @@ class Response(object): :type http_text: str :param body: the HTTP response body :type body: str | dict + + .. note:: + This class is meant to be used internally only. """ __slots__ = ( diff --git a/arango/transaction.py b/arango/transaction.py index 44f87067..62eee8ec 100644 --- a/arango/transaction.py +++ b/arango/transaction.py @@ -48,7 +48,8 @@ def __init__(self, username=connection.username, password=connection.password, http_client=connection.http_client, - database=connection.database + database=connection.database, + enable_logging=connection.has_logging ) self._id = uuid4() self._actions = ['db = require("internal").db'] @@ -95,23 +96,32 @@ def handle_request(self, request, handler): raise TransactionError('unsupported method') self._actions.append(request.command) - def execute(self, command, params=None): + def execute(self, command, params=None, sync=None, timeout=None): """Execute raw Javascript code in a transaction. 
:param command: the raw Javascript code :type command: str :param params: optional arguments passed into the code :type params: dict + :param sync: wait for the operation to sync to disk (overrides the + value specified during the transaction object instantiation) + :type sync: bool + :param timeout: timeout on the collection locks (overrides the value + value specified during the transaction object instantiation) + :type timeout: int :return: the result of the transaction :rtype: dict :raises arango.exceptions.TransactionError: if the transaction cannot be executed """ data = {'collections': self._collections, 'action': command} - if self._timeout is not None: - data['lockTimeout'] = self._timeout - if self._sync is not None: - data['waitForSync'] = self._sync + timeout = self._timeout if timeout is None else timeout + sync = self._sync if sync is None else sync + + if timeout is not None: + data['lockTimeout'] = timeout + if sync is not None: + data['waitForSync'] = sync if params is not None: data['params'] = params diff --git a/arango/version.py b/arango/version.py index aaa42644..59a2fe6f 100644 --- a/arango/version.py +++ b/arango/version.py @@ -1 +1 @@ -VERSION = '3.0.0' +VERSION = '3.1.0' diff --git a/arango/wal.py b/arango/wal.py index 1b39b125..5cdb0496 100644 --- a/arango/wal.py +++ b/arango/wal.py @@ -3,9 +3,9 @@ from arango.utils import HTTP_OK from arango.exceptions import ( WALFlushError, - WALGetPropertiesError, + WALPropertiesError, WALConfigureError, - TransactionsListError + WALTransactionListError ) @@ -27,12 +27,12 @@ def properties(self): :returns: the configuration of the write-ahead log :rtype: dict - :raises arango.exceptions.WALGetPropertiesError: if the WAL properties + :raises arango.exceptions.WALPropertiesError: if the WAL properties cannot be retrieved from the server """ res = self._conn.get('/_admin/wal/properties') if res.status_code not in HTTP_OK: - raise WALGetPropertiesError(res) + raise WALPropertiesError(res) return { 'oversized_ops': res.body.get('allowOversizeEntries'), 'log_size': res.body.get('logfileSize'), @@ -61,7 +61,7 @@ def configure(self, oversized_ops=None, log_size=None, historic_logs=None, :type throttle_limit: int :returns: the new configuration of the write-ahead log :rtype: dict - :raises arango.exceptions.WALGetPropertiesError: if the WAL properties + :raises arango.exceptions.WALPropertiesError: if the WAL properties cannot be modified """ data = {} @@ -95,24 +95,24 @@ def transactions(self): Fields in the returned dictionary: - - **last_collected**: the ID of the last collected log file (at the \ + - *last_collected*: the ID of the last collected log file (at the \ start of each running transaction) or ``None`` if no transactions are \ running - - **last_sealed**: the ID of the last sealed log file (at the start \ + - *last_sealed*: the ID of the last sealed log file (at the start \ of each running transaction) or ``None`` if no transactions are \ running - - **count**: the number of current running transactions + - *count*: the number of current running transactions :returns: the information about the currently running transactions :rtype: dict - :raises arango.exceptions.TransactionsListError: if the details on the - transactions cannot be retrieved + :raises arango.exceptions.WALTransactionListError: if the details on + the transactions cannot be retrieved """ res = self._conn.get('/_admin/wal/transactions') if res.status_code not in HTTP_OK: - raise TransactionsListError(res) + raise WALTransactionListError(res) return { 
'last_collected': res.body['minLastCollected'], 'last_sealed': res.body['minLastSealed'], diff --git a/docs/admin.rst b/docs/admin.rst index eeaf4521..77d63877 100644 --- a/docs/admin.rst +++ b/docs/admin.rst @@ -1,3 +1,5 @@ +.. _admin-page: + Server Administration --------------------- @@ -59,4 +61,4 @@ For more information on the HTTP REST API visit this client.reload_routing() -Refer to the :ref:`ArangoClient` class for more details. +Refer to :ref:`ArangoClient` class for more details. diff --git a/docs/aql.rst b/docs/aql.rst index b373fb29..ca5bc488 100644 --- a/docs/aql.rst +++ b/docs/aql.rst @@ -1,11 +1,13 @@ +.. _aql-page: + AQL ---- **ArangoDB AQL Language (AQL)** is used to retrieve and modify data stored in ArangoDB. AQL is similar to SQL for relational databases but without the support for data-definition operations such as creating or deleting -:doc:`databases `, :doc:`collections ` and -:doc:`indexes ` etc. For more general information on AQL visit this +:ref:`databases `, :ref:`collections ` and +:ref:`indexes ` etc. For more general information on AQL visit this `page `__. AQL Queries @@ -80,7 +82,7 @@ information this `page `__. # Delete an existing AQL user function db.aql.delete_function('functions::temperature::converter') -Refer to the :ref:`AQL` class for more details. +Refer to :ref:`AQL` class for more details. AQL Query Cache @@ -112,4 +114,4 @@ information visit this # Clear the AQL query cache db.aql.cache.clear() -Refer to the :ref:`AQLQueryCache` class for more details. +Refer to :ref:`AQLQueryCache` class for more details. diff --git a/docs/async.rst b/docs/async.rst index 9bfcf481..dc56ff72 100644 --- a/docs/async.rst +++ b/docs/async.rst @@ -1,3 +1,5 @@ +.. _async-page: + Async Execution --------------- @@ -33,10 +35,12 @@ visit this `page `_. print(job, job.status()) # Retrieve the result of a job - job1.result() + result = job1.result() + assert isinstance(result, dict) # If a job fails the error is returned as opposed to being raised - assert isinstance(job3.result(), Exception) + result = job3.result() + assert isinstance(result, Exception) # Cancel a pending job job3.cancel() @@ -44,6 +48,14 @@ visit this `page `_. # Clear a result of a job from the server job4.clear() + # List the first 100 jobs done + client.async_jobs(status='done', count=100) + + # List the first 100 jobs pending in the queue + client.async_jobs(status='pending', count=100) + + # Clear all jobs from the server + client.clear_async_jobs() -Refer to the :ref:`AsyncExecution` and :ref:`AsyncJob` classes for more -details. +Refer to :ref:`ArangoClient`, :ref:`AsyncExecution` and :ref:`AsyncJob` +classes for more details. diff --git a/docs/batch.rst b/docs/batch.rst index 4deebf42..7e411a5d 100644 --- a/docs/batch.rst +++ b/docs/batch.rst @@ -1,3 +1,5 @@ +.. _batch-page: + Batch Execution --------------- @@ -47,5 +49,5 @@ For more information on the HTTP REST API for batch requests visit this batch.commit() # The commit must be called manually -Refer to the :ref:`BatchExecution` and :ref:`BatchJob` classes for more +Refer to :ref:`BatchExecution` and :ref:`BatchJob` classes for more details. diff --git a/docs/client.rst b/docs/client.rst index 58941a42..4b85df79 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -1,3 +1,5 @@ +.. _client-page: + Getting Started --------------- diff --git a/docs/collection.rst b/docs/collection.rst index 762f5cd1..9d4b2545 100644 --- a/docs/collection.rst +++ b/docs/collection.rst @@ -1,15 +1,17 @@ +.. 
_collection-page: + Collections ----------- -A **collection** consists of :doc:`documents `. It is uniquely +A **collection** consists of :ref:`documents `. It is uniquely identified by its name which must consist only of alphanumeric, hyphen and underscore characters. There are two types of collections: **document collections** which contain -:doc:`documents ` (standard) or **edge collections** which contain -:ref:`edges `. By default, collections use the **traditional** -key generator, which generates key values in a non-deterministic fashion. A -deterministic, auto-increment key generator can be used as well. +:ref:`documents ` (standard) or **edge collections** which +contain :ref:`edges `. By default, collections use the +**traditional** key generator which generates key values in a non-deterministic +fashion. A deterministic, auto-increment key generator can be used as well. For more information on the HTTP REST API for collection management visit this `page `__. @@ -45,5 +47,5 @@ For more information on the HTTP REST API for collection management visit this students.truncate() students.configure(journal_size=3000000) -Refer to the :ref:`Collection` class for more details. +Refer to :ref:`Collection` class for more details. diff --git a/docs/cursor.rst b/docs/cursor.rst index d7a54c68..7ba0c3c7 100644 --- a/docs/cursor.rst +++ b/docs/cursor.rst @@ -1,8 +1,10 @@ +.. _cursor-page: + Cursors ------- -Many operations defined in python-arango (including :doc:`aql` queries) return -:ref:`Cursor` objects to batch the network communication between the ArangoDB +Many operations defined in python-arango (including :ref:`aql-page` queries) +return :ref:`Cursor` objects to batch the network communication between the server and the client. Each request from the cursor fetches the next set of documents, where the total number of documents in the result set may or may not be known in advance depending on the query. For more information on the HTTP @@ -59,4 +61,4 @@ REST API for using cursors visit this # Delete the cursor from the server cursor.close() -Refer to the :ref:`Cursor` class for more details. \ No newline at end of file +Refer to :ref:`Cursor` class for more details. \ No newline at end of file diff --git a/docs/database.rst b/docs/database.rst index 2f48cc3c..232af1b0 100644 --- a/docs/database.rst +++ b/docs/database.rst @@ -1,3 +1,5 @@ +.. _database-page: + Databases --------- @@ -45,6 +47,6 @@ For more information on the HTTP REST API for database management visit this # Delete an existing database client.delete_database('test_db_01') -Refer to the :ref:`ArangoClient` and :ref:`Database` classes for more details -on database management, and the :doc:`user` page for more details on how to -create, update, replace or delete database users separately. +Refer to :ref:`ArangoClient` and :ref:`Database` classes for more details +on database management, and the :ref:`user-page` page for more details on how +to create, update, replace or delete database users separately. diff --git a/docs/document.rst b/docs/document.rst index e4218934..56438b4b 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -1,3 +1,5 @@ +.. _document-page: + Documents --------- @@ -37,7 +39,7 @@ required in python-arango. required fields ``"_from"`` and ``"_to"``. The value of these fields should be the value of a from and to vertices; ``"_id"`` fields. Edge documents are contained in :ref:`edge collections `, which are components -of :doc:`graphs `. +of :ref:`graphs `. 
**Here is an example of a valid edge document**: @@ -116,4 +118,4 @@ for more information on the REST HTTP API for document management visit this student['happy'] = True students.update(student) -Refer to the :ref:`Collection` class for more details. +Refer to :ref:`Collection` class for more details. diff --git a/docs/graph.rst b/docs/graph.rst index db4e207c..d99a2fbf 100644 --- a/docs/graph.rst +++ b/docs/graph.rst @@ -1,3 +1,5 @@ +.. _graph-page: + Graphs ------ @@ -79,7 +81,7 @@ refer to this `page `__. # Delete an existing vertex collection schedule.delete_vertex_collection('profs', purge=True) -Refer to the :ref:`Graph` and :ref:`VertexCollection` classes for more details. +Refer to :ref:`Graph` and :ref:`VertexCollection` classes for more details. .. _edge-definitions: @@ -142,7 +144,7 @@ refer to this `page `__. # Delete an existing edge definition (and the collection) schedule.delete_edge_definition('teaches', purge=False) -Refer to the :ref:`Graph` and :ref:`EdgeCollection` classes for more details. +Refer to :ref:`Graph` and :ref:`EdgeCollection` classes for more details. .. _graph-traversals: @@ -195,4 +197,4 @@ information on the HTTP REST API for executing graph traversals refer to this ) print(traversal_results['visited']) -Refer to the :ref:`Graph` class for more details. +Refer to :ref:`Graph` class for more details. diff --git a/docs/index.rst b/docs/index.rst index d4b85f55..40ea6c80 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,3 +1,5 @@ +.. _index-page: + Indexes ------- @@ -40,4 +42,4 @@ on the HTTP REST API for collection index management visit this # Delete an existing index from the collection cities.delete_index('some_index_id') -Refer to the :ref:`Collection` class for more details. +Refer to :ref:`Collection` class for more details. diff --git a/docs/intro.rst b/docs/intro.rst index a764ad88..2b8ef3c5 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -5,23 +5,23 @@ .. image:: /static/logo.png - Welcome to the documentation for **python-arango**, a Python driver for `ArangoDB's REST API `__. + Features ======== - Clean, Pythonic interface - Lightweight -- 95%+ API coverage +- 95%+ ArangoDB REST API coverage Compatibility ============= -- Tested with Python versions 2.7.x, 3.4.x and 3.5.x -- Latest version of python-arango (3.x) works with ArangoDB 3.x only -- Older versions of python-arango (2.x) work with ArangoDB 2.x only +- Python versions 2.7.x, 3.4.x and 3.5.x are supported +- Latest version of python-arango (3.x) supports ArangoDB 3.x only +- Older versions of python-arango support ArangoDB 1.x ~ 2.x only Installation ============ diff --git a/docs/task.rst b/docs/task.rst index 3195b365..93020d8c 100644 --- a/docs/task.rst +++ b/docs/task.rst @@ -1,3 +1,5 @@ +.. _task-page: + Task Management --------------- @@ -30,7 +32,7 @@ more information on the HTTP REST API for task management visit this name='test_task', command=''' var task = function(params){ - var db =require('@arangodb'); + var db = require('@arangodb'); db.print(params); } task(params); @@ -50,4 +52,4 @@ more information on the HTTP REST API for task management visit this # Delete an existing task db.delete_task('001', ignore_missing=False) -Refer to the :ref:`Database` class for more details. +Refer to :ref:`Database` class for more details. diff --git a/docs/transaction.rst b/docs/transaction.rst index 1075a746..f9e1f099 100644 --- a/docs/transaction.rst +++ b/docs/transaction.rst @@ -1,3 +1,5 @@ +.. 
_transaction-page: + Transactions ------------ @@ -65,4 +67,4 @@ transactions visit this `page `__. assert 'Greg' in db.collection('students') assert result is True -Refer to the :ref:`Transaction` class for more details. +Refer to :ref:`Transaction` class for more details. diff --git a/docs/user.rst b/docs/user.rst index 5747aa4d..6abb0a14 100644 --- a/docs/user.rst +++ b/docs/user.rst @@ -1,3 +1,5 @@ +.. _user-page: + User Management --------------- @@ -40,4 +42,4 @@ this `page `__. # Delete an existing user client.delete_user(username='jill') -Refer to the :ref:`ArangoClient` class for more details. +Refer to :ref:`ArangoClient` class for more details. diff --git a/docs/wal.rst b/docs/wal.rst index 5766bb78..c23eb199 100644 --- a/docs/wal.rst +++ b/docs/wal.rst @@ -1,3 +1,5 @@ +.. _wal-page: + Write-Ahead Log --------------- @@ -38,4 +40,4 @@ general information visit this # Flush the WAL with garbage collection client.wal.flush(garbage_collect=True) -Refer to the :ref:`WriteAheadLog` class for more details. +Refer to :ref:`WriteAheadLog` class for more details. diff --git a/scripts/setup_arangodb.sh b/scripts/setup_arangodb.sh index 224172af..483d4022 100644 --- a/scripts/setup_arangodb.sh +++ b/scripts/setup_arangodb.sh @@ -3,7 +3,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" cd $DIR -VERSION=3.0.4 +VERSION=3.0.5 NAME=ArangoDB-$VERSION if [ ! -d "$DIR/$NAME" ]; then diff --git a/tests/test_query.py b/tests/test_aql.py similarity index 78% rename from tests/test_query.py rename to tests/test_aql.py index babf58be..1ef254b8 100644 --- a/tests/test_query.py +++ b/tests/test_aql.py @@ -3,11 +3,23 @@ import pytest from arango import ArangoClient -from arango.exceptions import * +from arango.aql import AQL +from arango.exceptions import ( + AQLQueryExecuteError, + AQLQueryExplainError, + AQLQueryValidateError, + AQLFunctionListError, + AQLFunctionCreateError, + AQLFunctionDeleteError, + AQLCacheClearError, + AQLCacheConfigureError, + AQLCachePropertiesError, +) from .utils import ( generate_db_name, - generate_col_name + generate_col_name, + generate_user_name ) @@ -17,15 +29,23 @@ db = arango_client.create_database(db_name) col_name = generate_col_name(db) db.create_collection(col_name) +username = generate_user_name(arango_client) +user = arango_client.create_user(username, 'password') func_name = '' func_body = '' def teardown_module(*_): arango_client.delete_database(db_name, ignore_missing=True) - + arango_client.delete_user(username, ignore_missing=True) @pytest.mark.order1 +def test_init(): + assert isinstance(db.aql, AQL) + assert 'ArangoDB AQL' in repr(db.aql) + + +@pytest.mark.order2 def test_query_explain(): fields_to_check = [ 'estimatedNrItems', @@ -43,7 +63,8 @@ def test_query_explain(): plans = db.aql.explain( 'FOR d IN {} RETURN d'.format(col_name), all_plans=True, - opt_rules=['-all', '+use-index-range'] + opt_rules=['-all', '+use-index-range'], + max_plans=10 ) for plan in plans: for field in fields_to_check: @@ -59,7 +80,7 @@ def test_query_explain(): assert field in plan -@pytest.mark.order2 +@pytest.mark.order3 def test_query_validate(): # Test invalid query with pytest.raises(AQLQueryValidateError): @@ -73,7 +94,7 @@ def test_query_validate(): assert 'parsed' in result -@pytest.mark.order3 +@pytest.mark.order4 def test_query_execute(): # Test invalid AQL query with pytest.raises(AQLQueryExecuteError): @@ -103,12 +124,14 @@ def test_query_execute(): result = db.aql.execute( 'FOR d IN {} FILTER d.value == @value RETURN d'.format(col_name), 
bind_vars={'value': 1}, - count=True + count=True, + full_count=True, + max_plans=100 ) assert set(d['_key'] for d in result) == {'doc04', 'doc05'} -@pytest.mark.order4 +@pytest.mark.order5 def test_query_function_create_and_list(): global func_name, func_body @@ -131,7 +154,7 @@ def test_query_function_create_and_list(): assert result is True -@pytest.mark.order5 +@pytest.mark.order6 def test_query_function_delete_and_list(): # Test delete AQL function result = db.aql.delete_function(func_name) @@ -147,14 +170,14 @@ def test_query_function_delete_and_list(): assert db.aql.functions() == {} -@pytest.mark.order6 +@pytest.mark.order7 def test_get_query_cache_properties(): properties = db.aql.cache.properties() assert 'mode' in properties assert 'limit' in properties -@pytest.mark.order7 +@pytest.mark.order8 def test_set_query_cache_properties(): properties = db.aql.cache.configure( mode='on', limit=100 @@ -167,7 +190,25 @@ def test_set_query_cache_properties(): assert properties['limit'] == 100 -@pytest.mark.order8 +@pytest.mark.order9 def test_clear_query_cache(): result = db.aql.cache.clear() assert isinstance(result, bool) + + +@pytest.mark.order10 +def test_aql_errors(): + bad_db_name = generate_db_name(arango_client) + bad_aql = arango_client.database(bad_db_name).aql + + with pytest.raises(AQLFunctionListError): + bad_aql.functions() + + with pytest.raises(AQLCachePropertiesError): + bad_aql.cache.properties() + + with pytest.raises(AQLCacheConfigureError): + bad_aql.cache.configure(mode='on') + + with pytest.raises(AQLCacheClearError): + bad_aql.cache.clear() diff --git a/tests/test_async.py b/tests/test_async.py index d45bb02d..925a0dca 100644 --- a/tests/test_async.py +++ b/tests/test_async.py @@ -1,16 +1,27 @@ from __future__ import absolute_import, unicode_literals +from time import sleep, time + import pytest +from six import string_types as string from arango import ArangoClient +from arango.aql import AQL +from arango.collections import Collection from arango.exceptions import ( + AsyncExecuteError, + AsyncJobCancelError, + AsyncJobClearError, + AsyncJobResultError, + AsyncJobStatusError, + AsyncJobListError, AQLQueryExecuteError ) +from arango.graph import Graph from .utils import ( generate_db_name, - generate_col_name, - generate_graph_name + generate_col_name ) arango_client = ArangoClient() @@ -18,10 +29,7 @@ db = arango_client.create_database(db_name) col_name = generate_col_name(db) col = db.create_collection(col_name) -graph_name = generate_graph_name(db) -graph = db.create_graph(graph_name) -vcol_name = generate_col_name(db) -graph.create_vertex_collection(vcol_name) +col.add_fulltext_index(fields=['val']) def teardown_module(*_): @@ -33,49 +41,296 @@ def setup_function(*_): def wait_on_job(job): - while True: - if job.status() == 'done': - break - return job.result() + while job.status() == 'pending': + pass @pytest.mark.order1 -def test_async_inserts(): - assert len(col) == 0 +def test_init(): async = db.async(return_result=True) + + assert async.type == 'async' + assert 'ArangoDB asynchronous execution' in repr(async) + assert isinstance(async.aql, AQL) + assert isinstance(async.graph('test'), Graph) + assert isinstance(async.collection('test'), Collection) + + +@pytest.mark.order2 +def test_async_execute_error(): + bad_db = arango_client.db( + name=db_name, + username='root', + password='incorrect' + ) + async = bad_db.async(return_result=False) + with pytest.raises(AsyncExecuteError): + async.collection(col_name).insert({'_key': '1', 'val': 1}) + with 
pytest.raises(AsyncExecuteError): + async.collection(col_name).properties() + with pytest.raises(AsyncExecuteError): + async.aql.execute('FOR d IN {} RETURN d'.format(col_name)) + + +@pytest.mark.order3 +def test_async_inserts_without_result(): + # Test precondition + assert len(col) == 0 + + # Insert test documents asynchronously with return_result False + async = db.async(return_result=False) job1 = async.collection(col_name).insert({'_key': '1', 'val': 1}) job2 = async.collection(col_name).insert({'_key': '2', 'val': 2}) job3 = async.collection(col_name).insert({'_key': '3', 'val': 3}) + # Ensure that no jobs were returned + for job in [job1, job2, job3]: + assert job is None + + # Ensure that the asynchronously requests went through + sleep(0.5) assert len(col) == 3 - assert job1.result()['_key'] == '1' - assert job2.result()['_key'] == '2' - assert job3.result()['_key'] == '3' + assert col['1']['val'] == 1 + assert col['2']['val'] == 2 + assert col['3']['val'] == 3 -@pytest.mark.order1 +@pytest.mark.order4 +def test_async_inserts_with_result(): + # Test precondition + assert len(col) == 0 + + # Insert test documents asynchronously with return_result True + async_col = db.async(return_result=True).collection(col_name) + test_docs = [{'_key': str(i), 'val': str(i * 42)} for i in range(10000)] + job1 = async_col.insert_many(test_docs, sync=True) + job2 = async_col.insert_many(test_docs, sync=True) + job3 = async_col.insert_many(test_docs, sync=True) + + # Test get result from a pending job + with pytest.raises(AsyncJobResultError) as err: + job3.result() + assert 'Job {} not done'.format(job3.id) in err.value.message + + # Test get result from finished but with existing jobs + for job in [job1, job2, job3]: + assert 'ArangoDB asynchronous job {}'.format(job.id) in repr(job) + assert isinstance(job.id, string) + wait_on_job(job) + assert len(job.result()) == 10000 + + # Test get result from missing jobs + for job in [job1, job2, job3]: + with pytest.raises(AsyncJobResultError) as err: + job.result() + assert 'Job {} missing'.format(job.id) in err.value.message + + # Test get result without authentication + setattr(getattr(job1, '_conn'), '_password', 'incorrect') + with pytest.raises(AsyncJobResultError) as err: + job.result() + assert '401' in err.value.message + + # Retrieve the results of the jobs + assert len(col) == 10000 + + +@pytest.mark.order5 def test_async_query(): + # Set up test documents async = db.async(return_result=True) wait_on_job(async.collection(col_name).import_bulk([ {'_key': '1', 'val': 1}, {'_key': '2', 'val': 2}, {'_key': '3', 'val': 3}, ])) - result = wait_on_job(async.aql.execute('THIS IS AN INVALID QUERY')) - assert isinstance(result, AQLQueryExecuteError) - result = wait_on_job(async.aql.execute( + # Test asynchronous execution of an invalid AQL query + job = async.aql.execute('THIS IS AN INVALID QUERY') + wait_on_job(job) + assert isinstance(job.result(), AQLQueryExecuteError) + + # Test asynchronous execution of a valid AQL query + job = async.aql.execute( 'FOR d IN {} RETURN d'.format(col_name), count=True, batch_size=1, ttl=10, optimizer_rules=['+all'] - )) - assert set(d['_key'] for d in result) == {'1', '2', '3'} + ) + wait_on_job(job) + assert set(d['_key'] for d in job.result()) == {'1', '2', '3'} - result = wait_on_job(async.aql.execute( + # Test asynchronous execution of another valid AQL query + job = async.aql.execute( 'FOR d IN {} FILTER d.val == @value RETURN d'.format(col_name), bind_vars={'value': 1}, count=True - )) - assert set(d['_key'] for 
d in result) == {'1'} + ) + wait_on_job(job) + assert set(d['_key'] for d in job.result()) == {'1'} + + +@pytest.mark.order6 +def test_async_get_status(): + async_col = db.async(return_result=True).collection(col_name) + test_docs = [{'_key': str(i), 'val': str(i * 42)} for i in range(10000)] + + # Test get status of a pending job + job = async_col.insert_many(test_docs, sync=True) + assert job.status() == 'pending' + + # Test get status of a finished job + wait_on_job(job) + assert job.status() == 'done' + assert len(job.result()) == len(test_docs) + + # Test get status of a missing job + with pytest.raises(AsyncJobStatusError) as err: + job.status() + assert 'Job {} missing'.format(job.id) in err.value.message + + # Test get status without authentication + setattr(getattr(job, '_conn'), '_password', 'incorrect') + with pytest.raises(AsyncJobStatusError) as err: + job.status() + assert 'HTTP 401' in err.value.message + + +@pytest.mark.order7 +def test_cancel_async_job(): + async_col = db.async(return_result=True).collection(col_name) + test_docs = [{'_key': str(i), 'val': str(i * 42)} for i in range(10000)] + + job1 = async_col.insert_many(test_docs, sync=True) + job2 = async_col.insert_many(test_docs, sync=True) + job3 = async_col.insert_many(test_docs, sync=True) + + # Test cancel a pending job + assert job3.cancel() is True + + # Test cancel a finished job + for job in [job1, job2]: + wait_on_job(job) + assert job.status() == 'done' + with pytest.raises(AsyncJobCancelError) as err: + job.cancel() + assert 'Job {} missing'.format(job.id) in err.value.message + assert job.cancel(ignore_missing=True) is False + + # Test cancel a cancelled job + sleep(0.5) + with pytest.raises(AsyncJobCancelError) as err: + job3.cancel(ignore_missing=False) + assert 'Job {} missing'.format(job3.id) in err.value.message + assert job3.cancel(ignore_missing=True) is False + + # Test cancel without authentication + setattr(getattr(job1, '_conn'), '_password', 'incorrect') + with pytest.raises(AsyncJobCancelError) as err: + job1.cancel(ignore_missing=False) + assert 'HTTP 401' in err.value.message + + +@pytest.mark.order8 +def test_clear_async_job(): + # Setup test asynchronous jobs + async = db.async(return_result=True) + job1 = async.collection(col_name).insert({'_key': '1', 'val': 1}) + job2 = async.collection(col_name).insert({'_key': '2', 'val': 2}) + job3 = async.collection(col_name).insert({'_key': '3', 'val': 3}) + for job in [job1, job2, job3]: + wait_on_job(job) + + # Test clear finished jobs + assert job1.clear(ignore_missing=True) is True + assert job2.clear(ignore_missing=True) is True + assert job3.clear(ignore_missing=False) is True + + # Test clear missing jobs + for job in [job1, job2, job3]: + with pytest.raises(AsyncJobClearError) as err: + job.clear(ignore_missing=False) + assert 'Job {} missing'.format(job.id) in err.value.message + assert job.clear(ignore_missing=True) is False + + # Test clear without authentication + setattr(getattr(job1, '_conn'), '_password', 'incorrect') + with pytest.raises(AsyncJobClearError) as err: + job1.clear(ignore_missing=False) + assert 'HTTP 401' in err.value.message + + +@pytest.mark.order9 +def test_clear_async_jobs(): + # Set up test documents + async = db.async(return_result=True) + job1 = async.collection(col_name).insert({'_key': '1', 'val': 1}) + job2 = async.collection(col_name).insert({'_key': '2', 'val': 2}) + job3 = async.collection(col_name).insert({'_key': '3', 'val': 3}) + for job in [job1, job2, job3]: + wait_on_job(job) + assert job.status() 
== 'done' + + # Test clear all async jobs + assert arango_client.clear_async_jobs() is True + for job in [job1, job2, job3]: + with pytest.raises(AsyncJobStatusError) as err: + job.status() + assert 'Job {} missing'.format(job.id) in err.value.message + + # Set up test documents again + async = db.async(return_result=True) + job1 = async.collection(col_name).insert({'_key': '1', 'val': 1}) + job2 = async.collection(col_name).insert({'_key': '2', 'val': 2}) + job3 = async.collection(col_name).insert({'_key': '3', 'val': 3}) + for job in [job1, job2, job3]: + wait_on_job(job) + assert job.status() == 'done' + + # Test clear jobs that have not expired yet + past = int(time()) - 1000000 + assert arango_client.clear_async_jobs(threshold=past) is True + for job in [job1, job2, job3]: + assert job.status() == 'done' + + future = int(time()) + 1000000 + assert arango_client.clear_async_jobs(threshold=future) is True + for job in [job1, job2, job3]: + with pytest.raises(AsyncJobStatusError) as err: + job.status() + assert 'Job {} missing'.format(job.id) in err.value.message + + # Test clear job without authentication + with pytest.raises(AsyncJobClearError) as err: + ArangoClient(password='incorrect').clear_async_jobs() + assert 'HTTP 401' in err.value.message + + +@pytest.mark.order10 +def test_list_async_jobs(): + # Set up test documents + async = db.async(return_result=True) + job1 = async.collection(col_name).insert({'_key': '1', 'val': 1}) + job2 = async.collection(col_name).insert({'_key': '2', 'val': 2}) + job3 = async.collection(col_name).insert({'_key': '3', 'val': 3}) + jobs = [job1, job2, job3] + for job in jobs: + wait_on_job(job) + expected_job_ids = [job.id for job in jobs] + + # Test list async jobs that are done + job_ids = arango_client.async_jobs(status='done') + assert sorted(expected_job_ids) == sorted(job_ids) + + # Test list async jobs that are pending + assert arango_client.async_jobs(status='pending') == [] + + # Test list async jobs with invalid status + with pytest.raises(AsyncJobListError): + arango_client.async_jobs(status='bad_status') + + # Test list jobs with count + job_ids = arango_client.async_jobs(status='done', count=1) + assert len(job_ids) == 1 + assert job_ids[0] in expected_job_ids diff --git a/tests/test_batch.py b/tests/test_batch.py index e6384962..a41b881a 100644 --- a/tests/test_batch.py +++ b/tests/test_batch.py @@ -1,9 +1,18 @@ from __future__ import absolute_import, unicode_literals +from uuid import UUID + +import pytest + from arango import ArangoClient +from arango.aql import AQL +from arango.collections import Collection from arango.exceptions import ( - DocumentInsertError + DocumentRevisionError, + DocumentInsertError, + BatchExecuteError ) +from arango.graph import Graph from .utils import ( generate_db_name, @@ -13,6 +22,8 @@ arango_client = ArangoClient() db_name = generate_db_name(arango_client) db = arango_client.create_database(db_name) +bad_db_name = generate_db_name(arango_client) +bad_db = arango_client.db(bad_db_name) col_name = generate_col_name(db) col = db.create_collection(col_name) @@ -25,41 +36,81 @@ def setup_function(*_): col.truncate() +def test_init(): + batch = db.batch() + assert batch.type == 'batch' + assert 'ArangoDB batch execution {}'.format(batch.id) in repr(batch) + assert isinstance(batch.aql, AQL) + assert isinstance(batch.graph('test'), Graph) + assert isinstance(batch.collection('test'), Collection) + + +def test_batch_job_properties(): + with db.batch(return_result=True) as batch: + batch_col = 
batch.collection(col_name) + job = batch_col.insert({'_key': '1', 'val': 1}) + + assert isinstance(job.id, UUID) + assert 'ArangoDB batch job {}'.format(job.id) in repr(job) + + +def test_batch_empty_commit(): + batch = db.batch(return_result=True) + assert batch.commit() is None + + +def test_batch_invalid_commit(): + assert len(col) == 0 + batch = bad_db.batch(return_result=True) + batch_col = batch.collection(col_name) + batch_col.insert({'_key': '1', 'val': 1}) + batch_col.insert({'_key': '2', 'val': 2}) + batch_col.insert({'_key': '2', 'val': 3}) + + with pytest.raises(BatchExecuteError): + batch.commit() + assert len(col) == 0 + + def test_batch_insert_context_manager_with_result(): assert len(col) == 0 with db.batch(return_result=True) as batch: batch_col = batch.collection(col_name) - batch_job1 = batch_col.insert({'_key': '1', 'val': 1}) - batch_job2 = batch_col.insert({'_key': '2', 'val': 2}) - batch_job3 = batch_col.insert({'_key': '2', 'val': 3}) + job1 = batch_col.insert({'_key': '1', 'val': 1}) + job2 = batch_col.insert({'_key': '2', 'val': 2}) + job3 = batch_col.insert({'_key': '2', 'val': 3}) + job4 = batch_col.get(key='2', rev='9999') assert len(col) == 2 assert col['1']['val'] == 1 assert col['2']['val'] == 2 - assert batch_job1.status() == 'done' - assert batch_job1.result()['_key'] == '1' + assert job1.status() == 'done' + assert job1.result()['_key'] == '1' - assert batch_job2.status() == 'done' - assert batch_job2.result()['_key'] == '2' + assert job2.status() == 'done' + assert job2.result()['_key'] == '2' - assert batch_job3.status() == 'error' - assert isinstance(batch_job3.result(), DocumentInsertError) + assert job3.status() == 'error' + assert isinstance(job3.result(), DocumentInsertError) + + assert job4.status() == 'error' + assert isinstance(job4.result(), DocumentRevisionError) def test_batch_insert_context_manager_without_result(): assert len(col) == 0 with db.batch(return_result=False) as batch: batch_col = batch.collection(col_name) - batch_job1 = batch_col.insert({'_key': '1', 'val': 1}) - batch_job2 = batch_col.insert({'_key': '2', 'val': 2}) - batch_job3 = batch_col.insert({'_key': '2', 'val': 3}) + job1 = batch_col.insert({'_key': '1', 'val': 1}) + job2 = batch_col.insert({'_key': '2', 'val': 2}) + job3 = batch_col.insert({'_key': '2', 'val': 3}) assert len(col) == 2 assert col['1']['val'] == 1 assert col['2']['val'] == 2 - assert batch_job1 is None - assert batch_job2 is None - assert batch_job3 is None + assert job1 is None + assert job2 is None + assert job3 is None def test_batch_insert_context_manager_commit_on_error(): @@ -67,12 +118,12 @@ def test_batch_insert_context_manager_commit_on_error(): try: with db.batch(return_result=True, commit_on_error=True) as batch: batch_col = batch.collection(col_name) - batch_job1 = batch_col.insert({'_key': '1', 'val': 1}) + job1 = batch_col.insert({'_key': '1', 'val': 1}) raise ValueError('Error!') except ValueError: assert col['1']['val'] == 1 - assert batch_job1.status() == 'done' - assert batch_job1.result()['_key'] == '1' + assert job1.status() == 'done' + assert job1.result()['_key'] == '1' def test_batch_insert_context_manager_no_commit_on_error(): @@ -80,58 +131,58 @@ def test_batch_insert_context_manager_no_commit_on_error(): try: with db.batch(return_result=True, commit_on_error=False) as batch: batch_col = batch.collection(col_name) - batch_job1 = batch_col.insert({'_key': '1', 'val': 1}) + job1 = batch_col.insert({'_key': '1', 'val': 1}) raise ValueError('Error!') except ValueError: assert 
len(col) == 0 - assert batch_job1.status() == 'pending' - assert batch_job1.result() is None + assert job1.status() == 'pending' + assert job1.result() is None def test_batch_insert_no_context_manager_with_result(): assert len(col) == 0 batch = db.batch(return_result=True) batch_col = batch.collection(col_name) - batch_job1 = batch_col.insert({'_key': '1', 'val': 1}) - batch_job2 = batch_col.insert({'_key': '2', 'val': 2}) - batch_job3 = batch_col.insert({'_key': '2', 'val': 3}) + job1 = batch_col.insert({'_key': '1', 'val': 1}) + job2 = batch_col.insert({'_key': '2', 'val': 2}) + job3 = batch_col.insert({'_key': '2', 'val': 3}) assert len(col) == 0 - assert batch_job1.status() == 'pending' - assert batch_job1.result() is None + assert job1.status() == 'pending' + assert job1.result() is None - assert batch_job2.status() == 'pending' - assert batch_job2.result() is None + assert job2.status() == 'pending' + assert job2.result() is None - assert batch_job3.status() == 'pending' - assert batch_job3.result() is None + assert job3.status() == 'pending' + assert job3.result() is None batch.commit() assert len(col) == 2 assert col['1']['val'] == 1 assert col['2']['val'] == 2 - assert batch_job1.status() == 'done' - assert batch_job1.result()['_key'] == '1' + assert job1.status() == 'done' + assert job1.result()['_key'] == '1' - assert batch_job2.status() == 'done' - assert batch_job2.result()['_key'] == '2' + assert job2.status() == 'done' + assert job2.result()['_key'] == '2' - assert batch_job3.status() == 'error' - assert isinstance(batch_job3.result(), DocumentInsertError) + assert job3.status() == 'error' + assert isinstance(job3.result(), DocumentInsertError) def test_batch_insert_no_context_manager_without_result(): assert len(col) == 0 batch = db.batch(return_result=False) batch_col = batch.collection(col_name) - batch_job1 = batch_col.insert({'_key': '1', 'val': 1}) - batch_job2 = batch_col.insert({'_key': '2', 'val': 2}) - batch_job3 = batch_col.insert({'_key': '2', 'val': 3}) + job1 = batch_col.insert({'_key': '1', 'val': 1}) + job2 = batch_col.insert({'_key': '2', 'val': 2}) + job3 = batch_col.insert({'_key': '2', 'val': 3}) - assert batch_job1 is None - assert batch_job2 is None - assert batch_job3 is None + assert job1 is None + assert job2 is None + assert job3 is None batch.commit() assert len(col) == 2 @@ -141,23 +192,37 @@ def test_batch_insert_no_context_manager_without_result(): def test_batch_query_context_manager_with_result(): with db.batch(return_result=True, commit_on_error=False) as batch: - batch_job1 = batch.collection(col_name).import_bulk([ + job1 = batch.collection(col_name).import_bulk([ {'_key': '1', 'val': 1}, {'_key': '2', 'val': 2}, {'_key': '3', 'val': 3}, ]) - batch_job2 = batch.aql.execute( + job2 = batch.aql.execute( 'FOR d IN {} RETURN d'.format(col_name), count=True, batch_size=1, ttl=10, optimizer_rules=['+all'] ) - batch_job3 = batch.aql.execute( + job3 = batch.aql.execute( 'FOR d IN {} FILTER d.val == @value RETURN d'.format(col_name), bind_vars={'value': 1}, count=True ) - assert batch_job1.result()['created'] == 3 - assert set(d['_key'] for d in batch_job2.result()) == {'1', '2', '3'} - assert set(d['_key'] for d in batch_job3.result()) == {'1'} + assert job1.result()['created'] == 3 + assert set(d['_key'] for d in job2.result()) == {'1', '2', '3'} + assert set(d['_key'] for d in job3.result()) == {'1'} + + +def test_batch_clear(): + assert len(col) == 0 + batch = db.batch(return_result=True) + batch_col = batch.collection(col_name) + job1 = 
batch_col.insert({'_key': '1', 'val': 1}) + job2 = batch_col.insert({'_key': '2', 'val': 2}) + batch.clear() + batch.commit() + + assert len(col) == 0 + assert job1.status() == 'pending' + assert job2.status() == 'pending' diff --git a/tests/test_client.py b/tests/test_client.py index 7b774acd..79411356 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -2,16 +2,19 @@ from datetime import datetime -import pytest from six import string_types +import pytest from arango import ArangoClient +from arango.http_clients import DefaultHTTPClient from arango.database import Database from arango.exceptions import * from .utils import generate_db_name -arango_client = ArangoClient() +http_client = DefaultHTTPClient(use_session=False) +arango_client = ArangoClient(http_client=http_client) +bad_arango_client = ArangoClient(username='root', password='incorrect') db_name = generate_db_name(arango_client) @@ -19,10 +22,24 @@ def teardown_module(*_): arango_client.delete_database(db_name, ignore_missing=True) +def test_verify(): + assert arango_client.verify() is True + with pytest.raises(ServerConnectionError): + ArangoClient( + username='root', + password='incorrect', + verify=True + ) + + def test_properties(): assert arango_client.protocol == 'http' assert arango_client.host == 'localhost' assert arango_client.port == 8529 + assert arango_client.username == 'root' + assert arango_client.password == '' + assert arango_client.http_client == http_client + assert arango_client.logging_enabled is True assert 'ArangoDB client for' in repr(arango_client) @@ -30,17 +47,26 @@ def test_version(): version = arango_client.version() assert isinstance(version, string_types) + with pytest.raises(ServerVersionError): + bad_arango_client.version() + def test_details(): details = arango_client.details() assert 'architecture' in details assert 'server-version' in details + with pytest.raises(ServerDetailsError): + bad_arango_client.details() + def test_required_db_version(): version = arango_client.required_db_version() assert isinstance(version, string_types) + with pytest.raises(ServerRequiredDBVersionError): + bad_arango_client.required_db_version() + def test_statistics(): statistics = arango_client.statistics(description=False) @@ -54,6 +80,9 @@ def test_statistics(): assert 'figures' in description assert 'groups' in description + with pytest.raises(ServerStatisticsError): + bad_arango_client.statistics() + def test_role(): assert arango_client.role() in { @@ -63,12 +92,17 @@ def test_role(): 'SECONDARY', 'UNDEFINED' } + with pytest.raises(ServerRoleError): + bad_arango_client.role() def test_time(): system_time = arango_client.time() assert isinstance(system_time, datetime) + with pytest.raises(ServerTimeError): + bad_arango_client.time() + def test_echo(): last_request = arango_client.echo() @@ -77,9 +111,15 @@ def test_echo(): assert 'requestType' in last_request assert 'rawRequestBody' in last_request + with pytest.raises(ServerEchoError): + bad_arango_client.echo() + -# def test_sleep(): -# assert arango_client.sleep(2) == 2 +def test_sleep(): + assert arango_client.sleep(0) == 0 + + with pytest.raises(ServerSleepError): + bad_arango_client.sleep(0) def test_execute(): @@ -92,17 +132,39 @@ def test_execute(): # TODO test parameters def test_log(): - log = arango_client.read_log() + # Test read_log with default arguments + log = arango_client.read_log(upto='fatal') + assert 'lid' in log + assert 'level' in log + assert 'text' in log + assert 'total_amount' in log + + # Test read_log with specific 
arguments + log = arango_client.read_log( + level='error', + start=0, + size=100000, + offset=0, + search='test', + sort='desc', + ) assert 'lid' in log assert 'level' in log assert 'text' in log assert 'total_amount' in log + # Test read_log with incorrect auth + with pytest.raises(ServerReadLogError): + bad_arango_client.read_log() + def test_reload_routing(): result = arango_client.reload_routing() assert isinstance(result, bool) + with pytest.raises(ServerReloadRoutingError): + bad_arango_client.reload_routing() + def test_endpoints(): endpoints = arango_client.endpoints() @@ -110,6 +172,9 @@ def test_endpoints(): for endpoint in endpoints: assert 'endpoint' in endpoint + with pytest.raises(ServerEndpointsError): + bad_arango_client.endpoints() + def test_database_management(): # Test list databases @@ -120,6 +185,9 @@ def test_database_management(): assert '_system' in result assert db_name not in arango_client.databases() + with pytest.raises(DatabaseListError): + bad_arango_client.databases() + # Test create database result = arango_client.create_database(db_name) assert isinstance(result, Database) diff --git a/tests/test_cluster.py b/tests/test_cluster.py new file mode 100644 index 00000000..862bddc5 --- /dev/null +++ b/tests/test_cluster.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import, unicode_literals + +import pytest +from six import string_types as string + +from arango import ArangoClient +from arango.aql import AQL +from arango.collections import Collection +from arango.exceptions import ClusterTestError +from arango.graph import Graph + +from .utils import ( + generate_db_name, + generate_col_name, + generate_graph_name +) + +arango_client = ArangoClient() +db_name = generate_db_name(arango_client) +db = arango_client.create_database(db_name) +col_name = generate_col_name(db) +col = db.create_collection(col_name) +graph_name = generate_graph_name(db) +graph = db.create_graph(graph_name) +vcol_name = generate_col_name(db) +graph.create_vertex_collection(vcol_name) + + +def teardown_module(*_): + arango_client.delete_database(db_name, ignore_missing=True) + + +@pytest.mark.order1 +def test_async_object(): + cluster = db.cluster( + shard_id=1, + transaction_id=1, + timeout=2000, + sync=False + ) + assert cluster.type == 'cluster' + assert 'ArangoDB cluster round-trip test' in repr(cluster) + assert isinstance(cluster.aql, AQL) + assert isinstance(cluster.graph(graph_name), Graph) + assert isinstance(cluster.collection(col_name), Collection) + + +@pytest.mark.order2 +def test_cluster_execute(): + cluster = db.cluster( + shard_id='foo', + transaction_id='bar', + timeout=2000, + sync=True + ) + with pytest.raises(ClusterTestError): + cluster.collection(col_name).checksum() diff --git a/tests/test_collection.py b/tests/test_collection.py index 2d47028c..c388fcac 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -17,6 +17,8 @@ db = arango_client.create_database(db_name) col_name = generate_col_name(db) col = db.create_collection(col_name) +bad_col_name = generate_col_name(db) +bad_col = db.collection(bad_col_name) def teardown_module(*_): @@ -46,6 +48,10 @@ def test_properties(): assert isinstance(properties['key_increment'], int) if 'key_offset' in properties: assert isinstance(properties['key_offset'], int) + with pytest.raises(CollectionBadStatusError): + assert getattr(col, '_status')(10) + with pytest.raises(CollectionPropertiesError): + bad_col.properties() def test_configure(): @@ -57,7 +63,7 @@ def test_configure(): new_sync = not old_sync 
new_journal_size = old_journal_size + 1 - # Test set properties + # Test configure result = col.configure(sync=new_sync, journal_size=new_journal_size) assert result['sync'] == new_sync assert result['journal_size'] == new_journal_size @@ -67,10 +73,16 @@ def test_configure(): assert new_properties['sync'] == new_sync assert new_properties['journal_size'] == new_journal_size + # Test missing collection + with pytest.raises(CollectionConfigureError): + bad_col.configure(sync=new_sync, journal_size=new_journal_size) + def test_rename(): assert col.name == col_name new_name = generate_col_name(db) + while new_name == bad_col_name: + new_name = generate_col_name(db) # Test rename collection result = col.rename(new_name) @@ -84,6 +96,9 @@ def test_rename(): assert col.name == new_name assert repr(col) == ''.format(new_name) + with pytest.raises(CollectionRenameError): + bad_col.rename(new_name) + def test_statistics(): stats = col.statistics() @@ -92,19 +107,27 @@ def test_statistics(): assert 'dead' in stats assert 'document_refs' in stats assert 'journals' in stats + with pytest.raises(CollectionStatisticsError): + bad_col.statistics() def test_revision(): revision = col.revision() assert isinstance(revision, string_types) + with pytest.raises(CollectionRevisionError): + bad_col.revision() def test_load(): assert col.load() in {'loaded', 'loading'} + with pytest.raises(CollectionLoadError): + bad_col.load() def test_unload(): assert col.unload() in {'unloaded', 'unloading'} + with pytest.raises(CollectionUnloadError): + bad_col.unload() def test_rotate(): @@ -127,6 +150,10 @@ def test_checksum(): assert col.checksum(with_rev=False, with_data=False) > 0 assert col.checksum(with_rev=False, with_data=True) > 0 + # Test checksum for missing collection + with pytest.raises(CollectionChecksumError): + bad_col.checksum() + def test_truncate(): col.insert_many([{'value': 1}, {'value': 2}, {'value': 3}]) @@ -141,3 +168,7 @@ def test_truncate(): assert 'status' in result assert 'is_system' in result assert len(col) == 0 + + # Test truncate missing collection + with pytest.raises(CollectionTruncateError): + bad_col.truncate() diff --git a/tests/test_cursor.py b/tests/test_cursor.py index 88570014..fb493804 100644 --- a/tests/test_cursor.py +++ b/tests/test_cursor.py @@ -45,6 +45,7 @@ def test_read_cursor_init(): optimizer_rules=['+all'] ) cursor_id = cursor.id + assert 'ArangoDB cursor' in repr(cursor) assert cursor.has_more() is True assert cursor.cached() is False assert cursor.statistics()['modified'] == 0 @@ -149,7 +150,7 @@ def test_read_cursor_early_finish(): ttl=1000, optimizer_rules=['+all'] ) - assert cursor.close(ignore_missing=True) is False + assert cursor.close() is True with pytest.raises(CursorCloseError): cursor.close(ignore_missing=False) @@ -248,8 +249,49 @@ def test_write_cursor_early_finish(): ttl=1000, optimizer_rules=['+all'] ) + assert cursor.close() is True + with pytest.raises(CursorCloseError): + cursor.close(ignore_missing=False) assert cursor.close(ignore_missing=True) is False + + col.truncate() + col.import_bulk([doc1, doc2, doc3, doc4]) + + cursor = db.aql.execute( + 'FOR d IN {} RETURN d'.format(col_name), + count=False, + batch_size=1, + ttl=1000, + optimizer_rules=['+all'] + ) + + +@pytest.mark.order11 +def test_cursor_context_manager(): + global cursor, cursor_id + + col.truncate() + col.import_bulk([doc1, doc2, doc3]) + + with db.aql.execute( + 'FOR d IN {} RETURN d'.format(col_name), + count=False, + batch_size=2, + ttl=1000, + optimizer_rules=['+all'] + ) as cursor: 
+ assert clean_keys(cursor.next()) == doc1 with pytest.raises(CursorCloseError): cursor.close(ignore_missing=False) - assert clean_keys(cursor.batch()) == [doc1] + with db.aql.execute( + 'FOR d IN {} RETURN d'.format(col_name), + count=False, + batch_size=2, + ttl=1000, + optimizer_rules=['+all'] + ) as cursor: + assert clean_keys(cursor.__next__()) == doc1 + with pytest.raises(CursorCloseError): + cursor.close(ignore_missing=False) + assert cursor.close(ignore_missing=True) is False \ No newline at end of file diff --git a/tests/test_database.py b/tests/test_database.py index 25b561b3..b7d4b35a 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -16,6 +16,8 @@ arango_client = ArangoClient() db_name = generate_db_name(arango_client) db = arango_client.create_database(db_name) +bad_db_name = generate_db_name(arango_client) +bad_db = arango_client.db(bad_db_name) col_name_1 = generate_col_name(db) col_name_2 = '' db.create_collection(col_name_1) @@ -37,6 +39,10 @@ def test_properties(): assert 'path' in properties assert properties['system'] is False assert properties['name'] == db_name + assert 'ArangoDB connection' in repr(db.connection) + + with pytest.raises(DatabasePropertiesError): + bad_db.properties() @pytest.mark.order2 @@ -46,6 +52,9 @@ def test_list_collections(): for col in db.collections() ) + with pytest.raises(CollectionListError): + bad_db.collections() + @pytest.mark.order3 def test_get_collection(): @@ -77,7 +86,8 @@ def test_create_collection(): key_offset=100, edge=True, shard_count=2, - shard_fields=["test_attr"] + shard_fields=["test_attr"], + index_bucket_count=10, ) properties = col.properties() assert 'id' in properties @@ -121,6 +131,9 @@ def test_list_graphs(): assert graph['orphan_collections'] == [] assert 'revision' in graph + with pytest.raises(GraphListError): + bad_db.graphs() + @pytest.mark.order7 def test_get_graph(): diff --git a/tests/test_document.py b/tests/test_document.py index 6f036225..1d147560 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -17,7 +17,9 @@ db = arango_client.create_database(db_name) col_name = generate_col_name(db) col = db.create_collection(col_name) -col.add_geo_index(['coordinates']) +geo_index = col.add_geo_index(['coordinates']) +bad_col_name = generate_col_name(db) +bad_col = db.collection(bad_col_name) doc1 = {'_key': '1', 'val': 100, 'text': 'foo', 'coordinates': [1, 1]} doc2 = {'_key': '2', 'val': 100, 'text': 'bar', 'coordinates': [2, 2]} @@ -156,6 +158,10 @@ def test_insert_many(): results = col.insert_many(test_docs, return_new=False) for result, doc in zip(results, test_docs): isinstance(result, DocumentInsertError) + + # Test get with missing collection + with pytest.raises(DocumentInsertError): + bad_col.insert_many(test_docs) def test_update(): @@ -274,9 +280,8 @@ def test_update(): assert col['1']['_rev'] == current_rev # Test update in missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentUpdateError): - db.collection(bad_col).update(doc) + bad_col.update(doc) def test_update_many(): @@ -428,9 +433,8 @@ def test_update_many(): assert doc['val'] == 700 # Test update_many in missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentUpdateError): - db.collection(bad_col).update_many(docs) + bad_col.update_many(docs) def test_update_match(): @@ -482,9 +486,8 @@ def test_update_match(): assert 'val' not in col['4'] # Test update matching documents in missing collection - bad_col = generate_col_name(db) with 
pytest.raises(DocumentUpdateError): - db.collection(bad_col).update_match({'val': 100}, {'foo': 100}) + bad_col.update_match({'val': 100}, {'foo': 100}) def test_replace(): @@ -571,9 +574,8 @@ def test_replace(): assert col['1']['_rev'] == current_rev # Test replace in missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentReplaceError): - db.collection(bad_col).replace(doc) + bad_col.replace(doc) def test_replace_many(): @@ -685,9 +687,8 @@ def test_replace_many(): assert doc['_rev'] == current_revs[doc['_key']] # Test replace_many in missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentReplaceError): - db.collection(bad_col).replace_many(docs) + bad_col.replace_many(docs) def test_replace_match(): @@ -721,9 +722,8 @@ def test_replace_match(): assert 'foo' not in col['5'] # Test replace matching documents in missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentReplaceError): - db.collection(bad_col).replace_match({'val': 100}, {'foo': 100}) + bad_col.replace_match({'val': 100}, {'foo': 100}) def test_delete(): @@ -779,6 +779,12 @@ def test_delete(): assert bad_doc['_key'] in col assert len(col) == 1 + bad_doc.update({'_rev': 'bad_rev'}) + with pytest.raises(DocumentDeleteError): + col.delete(bad_doc, check_rev=True) + assert bad_doc['_key'] in col + assert len(col) == 1 + # Test delete (document) with check_rev assert col.delete(doc4, ignore_missing=True) is False with pytest.raises(DocumentDeleteError): @@ -786,13 +792,11 @@ def test_delete(): assert len(col) == 1 # Test delete with missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentDeleteError): - db.collection(bad_col).delete(doc5) + bad_col.delete(doc5) - bad_col = generate_col_name(db) with pytest.raises(DocumentDeleteError): - db.collection(bad_col).delete(doc5['_key']) + bad_col.delete(doc5['_key']) def test_delete_many(): @@ -872,13 +876,11 @@ def test_delete_many(): assert len(col) == 0 # Test delete_many with missing collection - bad_col = generate_col_name(db) with pytest.raises(DocumentDeleteError): - db.collection(bad_col).delete_many(docs) + bad_col.delete_many(docs) - bad_col = generate_col_name(db) with pytest.raises(DocumentDeleteError): - db.collection(bad_col).delete_many(test_doc_keys) + bad_col.delete_many(test_doc_keys) def test_delete_match(): @@ -902,6 +904,23 @@ def test_delete_match(): assert col.delete_match({'val': 100}, limit=2) == 2 assert [doc['val'] for doc in col].count(100) == 1 + with pytest.raises(DocumentDeleteError): + bad_col.delete_match({'val': 100}) + + +def test_count(): + # Set up test documents + col.import_bulk(test_docs) + + assert len(col) == len(test_docs) + assert col.count() == len(test_docs) + + with pytest.raises(DocumentCountError): + len(bad_col) + + with pytest.raises(DocumentCountError): + bad_col.count() + def test_find(): # Check preconditions @@ -937,7 +956,7 @@ def test_find(): assert doc['_key'] in {'1', '2', '3', '4', '5'} assert doc['_key'] in col - # test find in empty collection + # Test find in empty collection col.truncate() assert list(col.find({})) == [] assert list(col.find({'val': 100})) == [] @@ -945,6 +964,10 @@ def test_find(): assert list(col.find({'val': 300})) == [] assert list(col.find({'val': 400})) == [] + # Test find in missing collection + with pytest.raises(DocumentGetError): + bad_col.find({'val': 100}) + def test_has(): # Set up test documents @@ -972,6 +995,12 @@ def test_has(): bad_rev = col['5']['_rev'] + '000' assert col.has('5', rev=bad_rev, 
match_rev=False) is True + with pytest.raises(DocumentInError): + bad_col.has('1') + + with pytest.raises(DocumentInError): + '1' in bad_col + def test_get(): # Set up test documents @@ -1000,6 +1029,8 @@ def test_get(): bad_rev = col['5']['_rev'] + '000' with pytest.raises(DocumentRevisionError): col.get('5', rev=bad_rev, match_rev=True) + with pytest.raises(DocumentGetError): + col.get('5', rev='bad_rev') # Test get with correct revision and match_rev turned off bad_rev = col['5']['_rev'] + '000' @@ -1008,6 +1039,16 @@ def test_get(): assert result['_rev'] != bad_rev assert result['val'] == 300 + # Test get with missing collection + with pytest.raises(DocumentGetError): + bad_col.get('1') + + with pytest.raises(DocumentGetError): + bad_col['1'] + + with pytest.raises(DocumentGetError): + iter(bad_col) + def test_get_many(): # Test precondition @@ -1043,6 +1084,9 @@ def test_get_many(): assert col.get_many(['2', '3']) == [] assert col.get_many(['2', '3', '4']) == [] + with pytest.raises(DocumentGetError): + bad_col.get_many(['2', '3', '4']) + def test_all(): # Check preconditions @@ -1055,15 +1099,35 @@ def test_all(): result = list(col.all()) assert ordered(clean_keys(result)) == test_docs - # # Test all with flush - # result = list(col.all(flush=True)) - # assert order_documents(clean_keys(result)) == test_docs + # Test all with flush + # result = list(col.all(flush=True, flush_wait=1)) + # assert ordered(clean_keys(result)) == test_docs # Test all with count result = col.all(count=True) assert result.count() == len(test_docs) assert ordered(clean_keys(result)) == test_docs + # Test all with batch size + result = col.all(count=True, batch_size=1) + assert result.count() == len(test_docs) + assert ordered(clean_keys(result)) == test_docs + + # Test all with time-to-live + result = col.all(count=True, ttl=1000) + assert result.count() == len(test_docs) + assert ordered(clean_keys(result)) == test_docs + + # Test all with filters + result = col.all( + count=True, + filter_fields=['text'], + filter_type='exclude' + ) + assert result.count() == 5 + for doc in result: + assert 'text' not in doc + # Test all with a limit of 0 result = col.all(count=True, limit=0) assert result.count() == len(test_docs) @@ -1083,6 +1147,10 @@ def test_all(): for doc in list(clean_keys(list(result))): assert doc in test_docs + # Test all in missing collection + with pytest.raises(DocumentGetError): + bad_col.all() + def test_random(): # Set up test documents @@ -1100,9 +1168,7 @@ def test_random(): assert random_doc is None # Test random in missing collection - bad_col_name = generate_col_name(db) with pytest.raises(DocumentGetError): - bad_col = db.collection(bad_col_name) bad_col.random() @@ -1131,9 +1197,7 @@ def test_find_near(): assert [doc['_key'] for doc in result] == ['5', '4', '3'] # Test random in missing collection - bad_col_name = generate_col_name(db) with pytest.raises(DocumentGetError): - bad_col = db.collection(bad_col_name) bad_col.find_near(latitude=1, longitude=1, limit=1) # Test find_near in an empty collection @@ -1143,6 +1207,10 @@ def test_find_near(): result = col.find_near(latitude=5, longitude=5, limit=4) assert list(result) == [] + # Test find near in missing collection + with pytest.raises(DocumentGetError): + bad_col.find_near(latitude=1, longitude=1, limit=1) + def test_find_in_range(): # Set up required index @@ -1179,6 +1247,10 @@ def test_find_in_range(): result = col.find_in_range('val', 100, 300, inclusive=False) assert [doc['_key'] for doc in result] == ['4'] + # Test 
find_in_range in missing collection + with pytest.raises(DocumentGetError): + bad_col.find_in_range(field='val', lower=100, upper=200, offset=2) + # TODO the WITHIN geo function does not seem to work properly def test_find_in_radius(): @@ -1192,6 +1264,10 @@ def test_find_in_radius(): for doc in result: assert 'distance' in doc + # Test find_in_radius in missing collection + with pytest.raises(DocumentGetError): + bad_col.find_in_radius(3, 3, 10, 'distance') + def test_find_in_box(): # Set up test documents @@ -1207,6 +1283,7 @@ def test_find_in_box(): longitude1=0, latitude2=6, longitude2=3, + geo_field=geo_index['id'] ) assert clean_keys(result) == [d3, d1] @@ -1217,6 +1294,7 @@ def test_find_in_box(): latitude2=6, longitude2=3, limit=0, + geo_field=geo_index['id'] ) assert clean_keys(result) == [d3, d1] @@ -1260,6 +1338,15 @@ def test_find_in_box(): ) assert clean_keys(result) == [d2, d1] + # Test find_in_box in missing collection + with pytest.raises(DocumentGetError): + bad_col.find_in_box( + latitude1=0, + longitude1=0, + latitude2=6, + longitude2=3, + ) + def test_find_by_text(): # Set up required index @@ -1332,8 +1419,6 @@ def test_import_bulk(): assert len(col) == 1 # Test import bulk in missing collection - bad_col_name = generate_col_name(db) with pytest.raises(DocumentInsertError): - bad_col = db.collection(bad_col_name) bad_col.import_bulk([doc3, doc4], halt_on_error=True) assert len(col) == 1 diff --git a/tests/test_graph.py b/tests/test_graph.py index 839e15de..2651199b 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, unicode_literals +#from __future__ import absolute_import, unicode_literals import pytest @@ -22,6 +22,11 @@ col = db.create_collection(col_name) graph_name = generate_graph_name(db) graph = db.create_graph(graph_name) +bad_graph_name = generate_graph_name(db) +bad_graph = db.graph(bad_graph_name) +bad_col_name = generate_col_name(db) +bad_vcol = bad_graph.vertex_collection(bad_col_name) +bad_ecol = bad_graph.edge_collection(bad_col_name) # vertices in test vertex collection #1 vertex1 = {'_key': '1', 'value': 1} @@ -64,6 +69,10 @@ def test_properties(): assert properties['name'] == graph_name assert properties['revision'].isdigit() + # Test if exception is raised properly + with pytest.raises(GraphPropertiesError): + bad_graph.properties() + @pytest.mark.order2 def test_create_vertex_collection(): @@ -72,7 +81,9 @@ def test_create_vertex_collection(): vcol1 = graph.create_vertex_collection('vcol1') assert isinstance(vcol1, VertexCollection) assert vcol1.name == 'vcol1' - + assert vcol1.name in repr(vcol1) + assert graph.name in repr(vcol1) + assert graph.name == vcol1.graph_name assert graph.vertex_collections() == ['vcol1'] assert graph.orphan_collections() == ['vcol1'] assert 'vcol1' in set(c['name'] for c in db.collections()) @@ -98,6 +109,12 @@ def test_create_vertex_collection(): def test_list_vertex_collections(): assert graph.vertex_collections() == ['vcol1', 'vcol2'] + # Test if exception is raised properly + with pytest.raises(VertexCollectionListError): + bad_graph.vertex_collections() + with pytest.raises(OrphanCollectionListError): + bad_graph.orphan_collections() + @pytest.mark.order4 def test_delete_vertex_collection(): @@ -126,6 +143,9 @@ def test_create_edge_definition(): ecol1 = graph.create_edge_definition('ecol1', [], []) assert isinstance(ecol1, EdgeCollection) assert ecol1.name == 'ecol1' + assert ecol1.name in repr(ecol1) + assert graph.name in repr(ecol1) + assert 
graph.name == ecol1.graph_name assert graph.edge_definitions() == [{ 'name': 'ecol1', @@ -222,6 +242,10 @@ def test_list_edge_definitions(): } ] + # Test if exception is raised properly + with pytest.raises(EdgeDefinitionListError): + bad_graph.edge_definitions() + @pytest.mark.order7 def test_replace_edge_definition(): @@ -324,6 +348,25 @@ def test_delete_edge_definition(): @pytest.mark.order9 +def test_create_graph_with_vertices_ane_edges(): + new_graph_name = generate_graph_name(db) + edge_definitions = [ + { + 'name': 'ecol1', + 'from_collections': ['vcol3'], + 'to_collections': ['vcol2'] + } + ] + new_graph = db.create_graph( + new_graph_name, + edge_definitions=edge_definitions, + orphan_collections=['vcol1'] + ) + assert new_graph.edge_definitions() == edge_definitions + assert new_graph.orphan_collections() == ['vcol1'] + + +@pytest.mark.order10 def test_insert_vertex(): vcol = graph.vertex_collection('vcol1') @@ -342,7 +385,7 @@ def test_insert_vertex(): # Test insert vertex into missing collection with pytest.raises(DocumentInsertError): - assert graph.vertex_collection('missing').insert(vertex2) + assert bad_vcol.insert(vertex2) assert '2' not in vcol assert len(vcol) == 1 @@ -352,7 +395,7 @@ def test_insert_vertex(): assert len(vcol) == 1 # Test insert second vertex - result = vcol.insert(vertex2) + result = vcol.insert(vertex2, sync=True) assert result['_id'] == 'vcol1/2' assert result['_key'] == '2' assert result['_rev'].isdigit() @@ -365,7 +408,7 @@ def test_insert_vertex(): assert vcol.insert(vertex2) -@pytest.mark.order10 +@pytest.mark.order11 def test_get_vertex(): vcol = graph.vertex_collection('vcol1') @@ -381,11 +424,15 @@ def test_get_vertex(): with pytest.raises(DocumentRevisionError): vcol.get('1', rev=str(int(old_rev) + 1)) + # Test get existing vertex from missing vertex collection + with pytest.raises(DocumentGetError): + bad_vcol.get('1') + # Test get existing vertex again assert clean_keys(vcol.get('2')) == {'_key': '2', 'value': 2} -@pytest.mark.order11 +@pytest.mark.order12 def test_update_vertex(): vcol = graph.vertex_collection('vcol1') @@ -422,6 +469,12 @@ def test_update_vertex(): assert vcol['1']['foo'] == 200 assert vcol['1']['bar'] == 400 + # Test update vertex in missing vertex collection + with pytest.raises(DocumentUpdateError): + bad_vcol.update({'_key': '1', 'bar': 500}) + assert vcol['1']['foo'] == 200 + assert vcol['1']['bar'] == 400 + # Test update vertex with sync option result = vcol.update({'_key': '1', 'bar': 500}, sync=True) assert result['_id'] == 'vcol1/1' @@ -449,7 +502,7 @@ def test_update_vertex(): assert vcol['1']['bar'] is None -@pytest.mark.order12 +@pytest.mark.order13 def test_replace_vertex(): vcol = graph.vertex_collection('vcol1') @@ -492,6 +545,12 @@ def test_replace_vertex(): assert vcol['1']['bar'] == 500 assert 'foo' not in vcol['1'] + # Test replace vertex in missing vertex collection + with pytest.raises(DocumentReplaceError): + bad_vcol.replace({'_key': '1', 'bar': 600}) + assert vcol['1']['bar'] == 500 + assert 'foo' not in vcol['1'] + # Test replace vertex with sync option vertex = {'_key': '1', 'bar': 400, 'foo': 200} result = vcol.replace(vertex, sync=True) @@ -502,7 +561,7 @@ def test_replace_vertex(): assert vcol['1']['bar'] == 400 -@pytest.mark.order13 +@pytest.mark.order14 def test_delete_vertex(): vcol = graph.vertex_collection('vcol1') vcol.truncate() @@ -528,21 +587,23 @@ def test_delete_vertex(): vcol.delete(vertex2) assert '2' in vcol + with pytest.raises(DocumentDeleteError): + bad_vcol.delete({'_key': 
'10', '_rev': 'boo'}, ignore_missing=True) + assert '2' in vcol + # Test delete vertex from missing collection with pytest.raises(DocumentDeleteError): - graph.vertex_collection('missing').delete( - vertex1, ignore_missing=False - ) + bad_vcol.delete(vertex1, ignore_missing=False) # Test delete missing vertex with pytest.raises(DocumentDeleteError): vcol.delete({'_key': '10'}, ignore_missing=False) # Test delete missing vertex while ignoring missing - vcol.delete({'_key': '10'}, ignore_missing=True) is None + assert vcol.delete({'_key': '10'}, ignore_missing=True) is False -@pytest.mark.order14 +@pytest.mark.order15 def test_insert_edge(): ecol = graph.edge_collection('ecol2') ecol.truncate() @@ -573,7 +634,7 @@ def test_insert_edge(): # Test insert valid edge into missing collection with pytest.raises(DocumentInsertError): - assert graph.vertex_collection('missing').insert(edge2) + assert bad_ecol.insert(edge2) assert '2' not in ecol assert len(ecol) == 1 @@ -583,7 +644,7 @@ def test_insert_edge(): assert len(ecol) == 1 # Test insert second valid edge - result = ecol.insert(edge2) + result = ecol.insert(edge2, sync=True) assert result['_id'] == 'ecol2/2' assert result['_key'] == '2' assert '2' in ecol @@ -621,30 +682,34 @@ def test_insert_edge(): assert 'd' not in vcol3 -@pytest.mark.order15 +@pytest.mark.order16 def test_get_edge(): ecol = graph.edge_collection('ecol2') ecol.truncate() for edge in [edge1, edge2, edge4]: ecol.insert(edge) - # Test get missing vertex + # Test get missing edge assert ecol.get('0') is None - # Test get existing vertex + # Test get existing edge result = ecol.get('1') old_rev = result['_rev'] assert clean_keys(result) == edge1 - # Test get existing vertex with wrong revision + # Test get existing edge with wrong revision with pytest.raises(DocumentRevisionError): ecol.get('1', rev=str(int(old_rev) + 1)) - # Test get existing vertex again + # Test get existing edge from missing edge collection + with pytest.raises(DocumentGetError): + bad_ecol.get('1') + + # Test get existing edge again assert clean_keys(ecol.get('2')) == edge2 -@pytest.mark.order16 +@pytest.mark.order17 def test_update_edge(): ecol = graph.edge_collection('ecol2') ecol.truncate() @@ -683,6 +748,12 @@ def test_update_edge(): assert ecol['1']['foo'] == 200 assert ecol['1']['bar'] == 400 + # Test update edge in missing edge collection + with pytest.raises(DocumentUpdateError): + bad_ecol.update({'_key': '1', 'bar': 500}) + assert ecol['1']['foo'] == 200 + assert ecol['1']['bar'] == 400 + # Test update edge with sync option result = ecol.update({'_key': '1', 'bar': 500}, sync=True) assert result['_id'] == 'ecol2/1' @@ -739,7 +810,7 @@ def test_update_edge(): assert ecol['1']['_rev'] != old_rev -@pytest.mark.order17 +@pytest.mark.order18 def test_replace_edge(): ecol = graph.edge_collection('ecol2') ecol.truncate() @@ -787,6 +858,12 @@ def test_replace_edge(): assert ecol['1']['foo'] == 300 assert ecol['1']['bar'] == 400 + # Test replace edge in missing edge collection + with pytest.raises(DocumentReplaceError): + bad_ecol.replace(edge) + assert ecol['1']['foo'] == 300 + assert ecol['1']['bar'] == 400 + # Test replace edge with sync option edge['_rev'] = None result = ecol.replace(edge, sync=True) @@ -826,7 +903,7 @@ def test_replace_edge(): assert ecol['1']['_rev'] != old_rev -@pytest.mark.order18 +@pytest.mark.order19 def test_delete_edge(): ecol = graph.edge_collection('ecol2') ecol.truncate() @@ -852,9 +929,7 @@ def test_delete_edge(): # Test delete edge from missing collection with 
pytest.raises(DocumentDeleteError): - graph.vertex_collection('missing').delete( - edge1, ignore_missing=False - ) + bad_ecol.delete(edge1, ignore_missing=False) # Test delete missing edge with pytest.raises(DocumentDeleteError): @@ -864,7 +939,7 @@ def test_delete_edge(): ecol.delete(edge3, ignore_missing=True) is None -@pytest.mark.order19 +@pytest.mark.order20 def test_traverse(): # Create test graph, vertex and edge collections curriculum = db.create_graph('curriculum') @@ -938,7 +1013,7 @@ def test_traverse(): result = curriculum.traverse( start_vertex='profs/anna', strategy='dfs', - direction='any' + direction='any', ) dfs_vertices = [v['_key'] for v in result['vertices']] result = curriculum.traverse( @@ -957,3 +1032,13 @@ def test_traverse(): ) visited_vertices = sorted([v['_key'] for v in result['vertices']]) assert visited_vertices == ['MAT102', 'MAT223', 'andy'] + + # Traverse the graph with uniqueness (should be same as before) + result = curriculum.traverse( + start_vertex='profs/andy', + vertex_uniqueness='global', + edge_uniqueness='global', + filter_func='if (vertex._key == "MAT101") {return "exclude";} return;' + ) + visited_vertices = sorted([v['_key'] for v in result['vertices']]) + assert visited_vertices == ['MAT102', 'MAT223', 'andy'] diff --git a/tests/test_index.py b/tests/test_index.py index 9c623d03..0e03ba48 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -4,7 +4,9 @@ from arango import ArangoClient from arango.exceptions import ( - IndexCreateError + IndexListError, + IndexCreateError, + IndexDeleteError ) from .utils import ( @@ -17,6 +19,8 @@ db = arango_client.create_database(db_name) col_name = generate_col_name(db) col = db.create_collection(col_name) +bad_col_name = generate_col_name(db) +bad_col = db.collection(bad_col_name) col.add_geo_index(['coordinates']) @@ -41,17 +45,24 @@ def test_list_indexes(): assert isinstance(indexes, list) assert expected_index in indexes + with pytest.raises(IndexListError): + bad_col.indexes() + def test_add_hash_index(): fields = ['attr1', 'attr2'] - result = col.add_hash_index(fields, unique=True) + result = col.add_hash_index( + fields=fields, + unique=True, + sparse=True + ) expected_index = { 'selectivity': 1, - 'sparse': False, + 'sparse': True, 'type': 'hash', 'fields': ['attr1', 'attr2'], - 'unique': True + 'unique': True, } for key, value in expected_index.items(): assert result[key] == value @@ -62,10 +73,14 @@ def test_add_hash_index(): def test_add_skiplist_index(): fields = ['attr1', 'attr2'] - result = col.add_skiplist_index(fields, unique=True) + result = col.add_skiplist_index( + fields=fields, + unique=True, + sparse=True + ) expected_index = { - 'sparse': False, + 'sparse': True, 'type': 'skiplist', 'fields': ['attr1', 'attr2'], 'unique': True @@ -79,7 +94,10 @@ def test_add_skiplist_index(): def test_add_geo_index(): # Test add geo index with one attribute - result = col.add_geo_index(fields=['attr1'], ordered=False) + result = col.add_geo_index( + fields=['attr1'], + ordered=False + ) expected_index = { 'sparse': True, @@ -173,8 +191,21 @@ def test_delete_index(): new_indexes = set(index['id'] for index in col.indexes()) assert new_indexes.issuperset(old_indexes) - for index_id in new_indexes - old_indexes: + indexes_to_delete = new_indexes - old_indexes + for index_id in indexes_to_delete: assert col.delete_index(index_id) is True new_indexes = set(index['id'] for index in col.indexes()) assert new_indexes == old_indexes + + # Test delete missing indexes + for index_id in 
indexes_to_delete: + assert col.delete_index(index_id, ignore_missing=True) is False + for index_id in indexes_to_delete: + with pytest.raises(IndexDeleteError): + col.delete_index(index_id, ignore_missing=False) + + # Test delete indexes in missing collection + for index_id in indexes_to_delete: + with pytest.raises(IndexDeleteError): + bad_col.delete_index(index_id, ignore_missing=False) diff --git a/tests/test_task.py b/tests/test_task.py index 7889f1c2..2103c1e1 100644 --- a/tests/test_task.py +++ b/tests/test_task.py @@ -15,6 +15,8 @@ arango_client = ArangoClient() db_name = generate_db_name(arango_client) db = arango_client.create_database(db_name) +bad_db_name = generate_db_name(arango_client) +bad_db = arango_client.db(bad_db_name) test_cmd = "(function(p){require('@arangodb').print(p);})(p)" @@ -35,6 +37,9 @@ def test_list_tasks(): assert isinstance(task['created'], float) assert isinstance(task['command'], string_types) + with pytest.raises(TaskListError): + bad_db.tasks() + def test_get_task(): # Test get existing tasks diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 34bef38f..c77e8734 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -3,6 +3,7 @@ import pytest from arango import ArangoClient +from arango.collections import Collection from arango.exceptions import TransactionError from .utils import ( @@ -37,6 +38,18 @@ def setup_function(*_): col.truncate() +def test_init(): + txn = db.transaction( + read=col_name, + write=col_name, + sync=True, + timeout=1000, + ) + assert txn.type == 'transaction' + assert 'ArangoDB transaction {}'.format(txn.id) in repr(txn) + assert isinstance(txn.collection('test'), Collection) + + def test_execute_without_params(): txn = db.transaction(write=col_name) result = txn.execute( @@ -47,7 +60,9 @@ def test_execute_without_params(): db.{col}.save({{ '_key': '2', 'val': 2}}); return 'success without params!'; }} - '''.format(col=col_name) + '''.format(col=col_name), + sync=False, + timeout=1000 ) assert result == 'success without params!' 
assert '1' in col and col['1']['val'] == 1 @@ -71,6 +86,22 @@ def test_execute_with_params(): assert col['2']['val'] == 4 +def test_execute_with_errors(): + txn = db.transaction(write=col_name) + bad_col_name = generate_col_name(db) + with pytest.raises(TransactionError): + txn.execute( + command=''' + function (params) {{ + var db = require('internal').db; + db.{col}.save({{ '_key': '1', 'val': params.one }}); + db.{col}.save({{ '_key': '2', 'val': params.two }}); + return 'this transaction should fail!'; + }}'''.format(col=bad_col_name), + params={'one': 3, 'two': 4} + ) + + def test_unsupported_methods(): txn = db.transaction(write=col_name) diff --git a/tests/test_user.py b/tests/test_user.py index c683a975..9d46ddf4 100644 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -1,7 +1,7 @@ from __future__ import absolute_import, unicode_literals -import pytest from six import string_types +import pytest from arango import ArangoClient from arango.exceptions import * @@ -13,6 +13,7 @@ ) arango_client = ArangoClient() +bad_arango_client = ArangoClient(password='incorrect') db_name = generate_db_name(arango_client) arango_client.create_database(db_name) @@ -32,6 +33,9 @@ def test_list_users(): assert isinstance(user['extra'], dict) assert isinstance(user['change_password'], bool) + with pytest.raises(UserListError): + bad_arango_client.users() + def test_get_user(): # Test get existing users @@ -39,9 +43,9 @@ def test_get_user(): assert arango_client.user(user['username']) == user # Test get missing user - missing_username = generate_user_name(arango_client) + bad_username = generate_user_name(arango_client) with pytest.raises(UserGetError): - arango_client.user(missing_username) + arango_client.user(bad_username) def test_create_user(): @@ -106,9 +110,9 @@ def test_update_user(): assert arango_client.user(new_user['username']) == new_user # Test update missing user - missing_username = generate_user_name(arango_client) + bad_username = generate_user_name(arango_client) with pytest.raises(UserUpdateError): - arango_client.update_user(missing_username, password='password') + arango_client.update_user(bad_username, password='password') def test_replace_user(): @@ -137,9 +141,9 @@ def test_replace_user(): assert arango_client.user(new_user['username']) == new_user # Test replace missing user - missing_username = generate_user_name(arango_client) + bad_username = generate_user_name(arango_client) with pytest.raises(UserReplaceError): - arango_client.replace_user(missing_username, password='password') + arango_client.replace_user(bad_username, password='password') def test_delete_user(): @@ -176,9 +180,9 @@ def test_grant_user_access(): assert col_name in set(col['name'] for col in db.collections()) # Test grant access to missing user - missing_username = generate_user_name(arango_client) + bad_username = generate_user_name(arango_client) with pytest.raises(UserGrantAccessError): - arango_client.grant_user_access(missing_username, db_name) + arango_client.grant_user_access(bad_username, db_name) def test_revoke_user_access(): @@ -200,6 +204,6 @@ def test_revoke_user_access(): assert err.value.http_code == 401 # Test revoke access to missing user - missing_username = generate_user_name(arango_client) + bad_username = generate_user_name(arango_client) with pytest.raises(UserRevokeAccessError): - arango_client.revoke_user_access(missing_username, db_name) + arango_client.revoke_user_access(bad_username, db_name) diff --git a/tests/test_wal.py b/tests/test_wal.py index a438efc4..ad4c528e 100644 --- 
a/tests/test_wal.py +++ b/tests/test_wal.py @@ -3,14 +3,29 @@ import pytest from arango import ArangoClient +from arango.exceptions import ( + WALConfigureError, + WALFlushError, + WALPropertiesError, + WALTransactionListError +) + +from .utils import generate_user_name arango_client = ArangoClient() wal = arango_client.wal +username = generate_user_name(arango_client) +user = arango_client.create_user(username, 'password') + + +def teardown_module(*_): + arango_client.delete_user(username, ignore_missing=True) @pytest.mark.order1 def test_wal_properties(): properties = wal.properties() + assert 'ArangoDB write-ahead log' in repr(wal) assert 'oversized_ops' in properties assert 'log_size' in properties assert 'historic_logs' in properties @@ -44,6 +59,28 @@ def test_wal_list_transactions(): assert 'last_collected' in result -# @pytest.mark.order4 -# def test_flush_wal(): -# assert isinstance(wal.flush(), bool) +@pytest.mark.order4 +def test_flush_wal(): + result = wal.flush(garbage_collect=False, sync=False) + assert isinstance(result, bool) + + +@pytest.mark.order5 +def test_wal_errors(): + client_with_bad_user = ArangoClient( + username=username, + password='incorrect', + verify=False + ) + bad_wal = client_with_bad_user.wal + with pytest.raises(WALPropertiesError): + bad_wal.properties() + + with pytest.raises(WALConfigureError): + bad_wal.configure(log_size=2000000) + + with pytest.raises(WALTransactionListError): + bad_wal.transactions() + + with pytest.raises(WALFlushError): + bad_wal.flush(garbage_collect=False, sync=False)
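
Below is a minimal, self-contained sketch (not part of the patch) of the error-path pattern these tests lean on: a handle to a collection that was never created on the server is set up once, and a read against it is expected to raise the operation-specific exception (here DocumentGetError, as asserted in the test_document.py hunks above). The connection details and the collection name are assumptions for illustration only; ArangoClient, db(), collection() and DocumentGetError are the same objects the tests themselves import.

import pytest

from arango import ArangoClient
from arango.exceptions import DocumentGetError

# Assumed local ArangoDB server with default credentials (root / empty password).
client = ArangoClient()
db = client.db('_system')  # any existing database works here

# Obtaining the handle performs no server call, so this line never fails on its own.
bad_col = db.collection('col_that_was_never_created')  # hypothetical name


def test_missing_collection_read_raises():
    # The server is only contacted on the read; the missing collection then
    # surfaces as DocumentGetError instead of silently returning None.
    with pytest.raises(DocumentGetError):
        bad_col.get('1')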