diff --git a/README.md b/README.md
index 99844043..049c1e65 100644
--- a/README.md
+++ b/README.md
@@ -57,36 +57,35 @@ for hero in marvel_heroes.run(connection):
 ```

 ### Asyncio mode
-Asyncio mode is compatible with Python ≥ 3.4, which is when asyncio was
-introduced into the standard library.
+Asyncio mode is compatible with Python ≥ 3.5.

 ```python
 import asyncio
 from rethinkdb import r

-# Native coroutines are supported in Python ≥ 3.5. In Python 3.4, you should
-# use the @asyncio.couroutine decorator instead of "async def", and "yield from"
-# instead of "await".
 async def main():
-    r.set_loop_type('asyncio')
-    connection = await r.connect(db='test')
-
-    await r.table_create('marvel').run(connection)
-
-    marvel_heroes = r.table('marvel')
-    await marvel_heroes.insert({
-        'id': 1,
-        'name': 'Iron Man',
-        'first_appearance': 'Tales of Suspense #39'
-    }).run(connection)
-
-    # "async for" is supported in Python ≥ 3.6. In earlier versions, you should
-    # call "await cursor.next()" in a loop.
-    cursor = await marvel_heroes.run(connection)
-    async for hero in cursor:
-        print(hero['name'])
-
-asyncio.get_event_loop().run_until_complete(main())
+    async with await r.connect(db='test') as connection:
+        await r.table_create('marvel').run(connection)
+
+        marvel_heroes = r.table('marvel')
+        await marvel_heroes.insert({
+            'id': 1,
+            'name': 'Iron Man',
+            'first_appearance': 'Tales of Suspense #39'
+        }).run(connection)
+
+        # "async for" is supported in Python ≥ 3.6. In earlier versions, you should
+        # call "await cursor.next()" in a loop.
+        cursor = await marvel_heroes.run(connection)
+        async for hero in cursor:
+            print(hero['name'])
+    # The `with` block performs `await connection.close(noreply_wait=False)`.
+
+r.set_loop_type('asyncio')
+
+# "asyncio.run" was added in Python 3.7. In earlier versions, you
+# might try asyncio.get_event_loop().run_until_complete(main()).
+asyncio.run(main())
 ```

 ### Gevent mode
@@ -253,8 +252,5 @@ $ export DO_TOKEN=
 $ make test-remote
 ```

-## New features
-Github's Issue tracker is **ONLY** used for reporting bugs. NO NEW FEATURE ACCEPTED! Use [spectrum](https://spectrum.chat/rethinkdb) for supporting features.
-
 ## Contributing
 Hurray! You reached this section which means, that you would like to contribute. Please read our contributing guide lines and feel free to open a pull request.
diff --git a/requirements.txt b/requirements.txt
index 86383dd8..4d2e981d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,14 +1,18 @@
 async-generator==1.10; python_version>="3.6"
+coverage<=4.5.4; python_version<"3.5"
+coverage==5.5; python_version>="3.5"
 codacy-coverage==1.3.11
+looseversion==1.3.0
 mock==3.0.5
-pytest-cov==2.10.0
+pytest-cov==2.10.1
 pytest-tornasync==0.6.0.post2; python_version >= '3.5'
 pytest-trio==0.6.0; python_version>="3.6"
 pytest==4.6.6; python_version<"3.5"
-pytest==5.4.3; python_version>="3.5"
+pytest==6.1.2; python_version>="3.5"
 six==1.15.0
 tornado==5.1.1; python_version<"3.6"
 tornado==6.0.4; python_version>="3.6"
 trio==0.16.0; python_version>="3.6"
-outcome==1.0.1; python_version>="3.5"
-attrs==19.3.0; python_version>="3.5"
+outcome==1.1.0; python_version>="3.6"
+outcome==1.0.1; python_version<="3.5"
+attrs==20.3.0; python_version>="3.5"
diff --git a/rethinkdb/_restore.py b/rethinkdb/_restore.py
index 178f1bdc..2c29eb1c 100755
--- a/rethinkdb/_restore.py
+++ b/rethinkdb/_restore.py
@@ -244,7 +244,7 @@ def do_unzip(temp_dir, options):
             )

             # filter out tables we are not looking for
-            table = os.path.splitext(file_name)
+            table = os.path.splitext(file_name)[0]
             if tables_to_export and not (
                 (db, table) in tables_to_export or (db, None) in tables_to_export
             ):
diff --git a/rethinkdb/ast.py b/rethinkdb/ast.py
index 3b9fddc6..3623cbf5 100644
--- a/rethinkdb/ast.py
+++ b/rethinkdb/ast.py
@@ -20,13 +20,21 @@
 import base64
 import binascii
-import collections
 import datetime
 import json
+import sys
 import threading

 from rethinkdb import ql2_pb2
-from rethinkdb.errors import QueryPrinter, ReqlDriverCompileError, ReqlDriverError, T
+from rethinkdb.errors import (QueryPrinter, ReqlDriverCompileError,
+                              ReqlDriverError, T)
+
+if sys.version_info < (3, 3):
+    # python < 3.3 uses collections
+    import collections
+else:
+    # but collections is deprecated from python >= 3.3
+    import collections.abc as collections

 P_TERM = ql2_pb2.Term.TermType
@@ -74,7 +82,7 @@ def clear(cls):

 def expr(val, nesting_depth=20):
     """
-    Convert a Python primitive into a RQL primitive value
+    Convert a Python primitive into a RQL primitive value
     """
     if not isinstance(nesting_depth, int):
         raise ReqlDriverCompileError("Second argument to `r.expr` must be a number.")
@@ -639,7 +647,7 @@ def compose(self, args, optargs):
         ]

         if self.infix:
-            return T("(", T(*t_args, intsp=[" ", self.statement_infix, " "]), ")")
+            return T("(", T(*t_args, intsp=[" ", self.st_infix, " "]), ")")
         else:
             return T("r.", self.statement, "(", T(*t_args, intsp=", "), ")")
@@ -759,7 +767,7 @@ def recursively_make_hashable(obj):

 class ReQLEncoder(json.JSONEncoder):
     """
-    Default JSONEncoder subclass to handle query conversion.
+    Default JSONEncoder subclass to handle query conversion.
     """

     def __init__(self):
@@ -779,7 +787,7 @@ def default(self, obj):

 class ReQLDecoder(json.JSONDecoder):
     """
-    Default JSONDecoder subclass to handle pseudo-type conversion.
+    Default JSONDecoder subclass to handle pseudo-type conversion.
""" def __init__(self, reql_format_opts=None): diff --git a/rethinkdb/asyncio_net/net_asyncio.py b/rethinkdb/asyncio_net/net_asyncio.py index 781081e5..e0058c4d 100644 --- a/rethinkdb/asyncio_net/net_asyncio.py +++ b/rethinkdb/asyncio_net/net_asyncio.py @@ -20,6 +20,7 @@ import socket import ssl import struct +import sys from rethinkdb import ql2_pb2 from rethinkdb.errors import ( @@ -39,13 +40,12 @@ pQuery = ql2_pb2.Query.QueryType -@asyncio.coroutine -def _read_until(streamreader, delimiter): +async def _read_until(streamreader, delimiter): """Naive implementation of reading until a delimiter""" buffer = bytearray() while True: - c = yield from streamreader.read(1) + c = await streamreader.read(1) if c == b"": break # EOF buffer.append(c[0]) @@ -69,13 +69,12 @@ def reusable_waiter(loop, timeout): else: deadline = None - @asyncio.coroutine - def wait(future): + async def wait(future): if deadline is not None: new_timeout = max(deadline - loop.time(), 0) else: new_timeout = None - return (yield from asyncio.wait_for(future, new_timeout, loop=loop)) + return (await asyncio.wait_for(future, new_timeout)) return wait @@ -101,20 +100,18 @@ def __init__(self, *args, **kwargs): def __aiter__(self): return self - @asyncio.coroutine - def __anext__(self): + async def __anext__(self): try: - return (yield from self._get_next(None)) + return (await self._get_next(None)) except ReqlCursorEmpty: raise StopAsyncIteration - @asyncio.coroutine - def close(self): + async def close(self): if self.error is None: self.error = self._empty_error() if self.conn.is_open(): self.outstanding_requests += 1 - yield from self.conn._parent._stop(self) + await self.conn._parent._stop(self) def _extend(self, res_buf): Cursor._extend(self, res_buf) @@ -123,8 +120,7 @@ def _extend(self, res_buf): # Convenience function so users know when they've hit the end of the cursor # without having to catch an exception - @asyncio.coroutine - def fetch_next(self, wait=True): + async def fetch_next(self, wait=True): timeout = Cursor._wait_to_timeout(wait) waiter = reusable_waiter(self.conn._io_loop, timeout) while len(self.items) == 0 and self.error is None: @@ -132,7 +128,7 @@ def fetch_next(self, wait=True): if self.error is not None: raise self.error with translate_timeout_errors(): - yield from waiter(asyncio.shield(self.new_response)) + await waiter(asyncio.shield(self.new_response)) # If there is a (non-empty) error to be received, we return True, so the # user will receive it on the next `next` call. return len(self.items) != 0 or not isinstance(self.error, RqlCursorEmpty) @@ -142,15 +138,14 @@ def _empty_error(self): # with mechanisms to return from a coroutine. return RqlCursorEmpty() - @asyncio.coroutine - def _get_next(self, timeout): + async def _get_next(self, timeout): waiter = reusable_waiter(self.conn._io_loop, timeout) while len(self.items) == 0: self._maybe_fetch_batch() if self.error is not None: raise self.error with translate_timeout_errors(): - yield from waiter(asyncio.shield(self.new_response)) + await waiter(asyncio.shield(self.new_response)) return self.items.popleft() def _maybe_fetch_batch(self): @@ -162,6 +157,8 @@ def _maybe_fetch_batch(self): self.outstanding_requests += 1 asyncio.ensure_future(self.conn._parent._continue(self)) +# Python <3.7's StreamWriter has no wait_closed(). 
+DO_WAIT_CLOSED = sys.version_info >= (3, 7)


 class ConnectionInstance(object):
     _streamreader = None
     _streamwriter = None
@@ -186,8 +183,7 @@ def client_address(self):
         if self.is_open():
             return self._streamwriter.get_extra_info("sockname")[0]

-    @asyncio.coroutine
-    def connect(self, timeout):
+    async def connect(self, timeout):
         try:
             ssl_context = None
             if len(self._parent.ssl) > 0:
@@ -199,10 +195,9 @@ def connect(self, timeout):
                 ssl_context.check_hostname = True  # redundant with match_hostname
                 ssl_context.load_verify_locations(self._parent.ssl["ca_certs"])

-            self._streamreader, self._streamwriter = yield from asyncio.open_connection(
+            self._streamreader, self._streamwriter = await asyncio.open_connection(
                 self._parent.host,
                 self._parent.port,
-                loop=self._io_loop,
                 ssl=ssl_context,
             )
             self._streamwriter.get_extra_info("socket").setsockopt(
@@ -227,26 +222,25 @@ def connect(self, timeout):
                     break
                 # This may happen in the `V1_0` protocol where we send two requests as
                 # an optimization, then need to read each separately
-                if request is not "":
+                if request != "":
                     self._streamwriter.write(request)
-                response = yield from asyncio.wait_for(
+                response = await asyncio.wait_for(
                     _read_until(self._streamreader, b"\0"),
                     timeout,
-                    loop=self._io_loop,
                 )
                 response = response[:-1]
         except ReqlAuthError:
-            yield from self.close()
+            await self.close()
             raise
         except ReqlTimeoutError as err:
-            yield from self.close()
+            await self.close()
             raise ReqlDriverError(
                 "Connection interrupted during handshake with %s:%s. Error: %s"
                 % (self._parent.host, self._parent.port, str(err))
             )
         except Exception as err:
-            yield from self.close()
+            await self.close()
             raise ReqlDriverError(
                 "Could not connect to %s:%s. Error: %s"
                 % (self._parent.host, self._parent.port, str(err))
@@ -260,8 +254,7 @@ def connect(self, timeout):
     def is_open(self):
         return not (self._closing or self._streamreader.at_eof())

-    @asyncio.coroutine
-    def close(self, noreply_wait=False, token=None, exception=None):
+    async def close(self, noreply_wait=False, token=None, exception=None):
         self._closing = True
         if exception is not None:
             err_message = "Connection is closed (%s)." % str(exception)
@@ -281,38 +274,39 @@ def close(self, noreply_wait=False, token=None, exception=None):
         if noreply_wait:
             noreply = Query(pQuery.NOREPLY_WAIT, token, None, None)
-            yield from self.run_query(noreply, False)
+            await self.run_query(noreply, False)

         self._streamwriter.close()
+        # Python <3.7 has no wait_closed().
+        if DO_WAIT_CLOSED:
+            await self._streamwriter.wait_closed()
         # We must not wait for the _reader_task if we got an exception, because that
         # means that we were called from it. Waiting would lead to a deadlock.
         if self._reader_task and exception is None:
-            yield from self._reader_task
+            await self._reader_task

         return None

-    @asyncio.coroutine
-    def run_query(self, query, noreply):
+    async def run_query(self, query, noreply):
         self._streamwriter.write(query.serialize(self._parent._get_json_encoder(query)))
         if noreply:
             return None

         response_future = asyncio.Future()
         self._user_queries[query.token] = (query, response_future)
-        return (yield from response_future)
+        return (await response_future)

     # The _reader coroutine runs in parallel, reading responses
     # off of the socket and forwarding them to the appropriate Future or Cursor.
     # This is shut down as a consequence of closing the stream, or an error in the
     # socket/protocol from the server. Unexpected errors in this coroutine will
     # close the ConnectionInstance and be passed to any open Futures or Cursors.
-    @asyncio.coroutine
-    def _reader(self):
+    async def _reader(self):
         try:
             while True:
-                buf = yield from self._streamreader.readexactly(12)
+                buf = await self._streamreader.readexactly(12)
                 (token, length,) = struct.unpack("= %s got: %s"
@@ -308,7 +308,7 @@ def take_action(self, action, dest, opt, value, values, parser):
             values.ensure_value(dest, {})[self.metavar.lower()] = value
         elif action == "get_password":
-            values[dest] = getpass.getpass("Password for `admin`: ")
+            values.ensure_value('password', getpass.getpass("Password for `admin`: "))
         else:
             super(CommonOptionChecker, self).take_action(
                 action, dest, opt, value, values, parser
diff --git a/rethinkdb/version.py b/rethinkdb/version.py
index 9c4ac378..572868c2 100644
--- a/rethinkdb/version.py
+++ b/rethinkdb/version.py
@@ -15,4 +15,4 @@
 # This file incorporates work covered by the following copyright:
 # Copyright 2010-2016 RethinkDB, all rights reserved.

-VERSION = "2.4.0+source"
+VERSION = "2.4.10.post1+source"
diff --git a/scripts/convert_protofile.py b/scripts/convert_protofile.py
index 98f676e3..ec80b3e8 100644
--- a/scripts/convert_protofile.py
+++ b/scripts/convert_protofile.py
@@ -86,9 +86,9 @@ def convertFile(inputFile, outputFile, language):
     assert(outputFile is not None and hasattr(outputFile, 'write'))
     assert(language in languageDefs)

-    messageRegex = re.compile('\s*(message|enum) (?P\w+) \{')
-    valueRegex = re.compile('\s*(?P\w+)\s*=\s*(?P\w+)')
-    endRegex = re.compile('\s*\}')
+    messageRegex = re.compile(r'\s*(message|enum) (?P\w+) \{')
+    valueRegex = re.compile(r'\s*(?P\w+)\s*=\s*(?P\w+)')
+    endRegex = re.compile(r'\s*\}')

     indentLevel = languageDefs[language]["initialIndentLevel"]
     lastIndentLevel = languageDefs[language]["initialIndentLevel"] - 1
diff --git a/setup.py b/setup.py
index 62aa03c3..ded5627f 100644
--- a/setup.py
+++ b/setup.py
@@ -21,8 +21,6 @@
 import setuptools

-from rethinkdb.version import VERSION
-
 try:
     import asyncio
@@ -32,26 +30,22 @@
 RETHINKDB_VERSION_DESCRIBE = os.environ.get("RETHINKDB_VERSION_DESCRIBE")

-VERSION_RE = r"^v(?P<version>\d+\.\d+)\.(?P<patch>\d+)?(\.(?P<post>\w+))?$"
-
-if RETHINKDB_VERSION_DESCRIBE:
-    MATCH = re.match(VERSION_RE, RETHINKDB_VERSION_DESCRIBE)
+VERSION_RE = r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?Ppost[1-9]\d*)"

-    if MATCH:
-        VERSION = MATCH.group("version")
+with open("rethinkdb/version.py", "r") as f:
+    version_parts = re.search(VERSION_RE, f.read()).groups()
+    VERSION = ".".join(filter(lambda x: x is not None, version_parts))

-        if MATCH.group("patch"):
-            VERSION += "." + MATCH.group("patch")
-        if MATCH.group("post"):
-            VERSION += "." + MATCH.group("post")

+if RETHINKDB_VERSION_DESCRIBE:
+    version_parts = re.match(VERSION_RE, RETHINKDB_VERSION_DESCRIBE)

-        with open("rethinkdb/version.py", "w") as f:
-            f.write('VERSION = {0}'.format(repr(VERSION)))
-    else:
+    if not version_parts:
         raise RuntimeError("{!r} does not match version format {!r}".format(
             RETHINKDB_VERSION_DESCRIBE, VERSION_RE))

+    VERSION = ".".join(filter(lambda x: x is not None, version_parts.groups()))
+

 setuptools.setup(
     name='rethinkdb',
@@ -73,6 +67,10 @@
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
+        'Programming Language :: Python :: 3.10',
+        'Programming Language :: Python :: 3.11',
+        'Programming Language :: Python :: 3.12',
     ],
     packages=[
         'rethinkdb',
@@ -95,9 +93,10 @@
             'rethinkdb-repl = rethinkdb.__main__:startInterpreter'
         ]
     },
-    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
+    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, >=3.5",
     install_requires=[
-        'six'
+        'six',
+        'looseversion'
     ],
     test_suite='tests'
 )