diff --git a/Makefile b/Makefile index 238d87f4..8b6c95c3 100644 --- a/Makefile +++ b/Makefile @@ -55,7 +55,6 @@ test-ci: @killall rebirthdb test-remote: - curl -qo ${REMOTE_TEST_SETUP_NAME} ${REMOTE_TEST_SETUP_URL} python ${REMOTE_TEST_SETUP_NAME} pytest -m integration install-db: diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 82ad5339..00000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,2 +0,0 @@ -tornado>=5.0 -pytest-tornasync; python_version >= '3.5' \ No newline at end of file diff --git a/rethinkdb/_export.py b/rethinkdb/_export.py index 0a546777..e57ebd12 100755 --- a/rethinkdb/_export.py +++ b/rethinkdb/_export.py @@ -23,7 +23,7 @@ import ctypes import datetime import json -import multiprocessing as mp +import multiprocessing import numbers import optparse import os @@ -35,6 +35,8 @@ import traceback from multiprocessing.queues import SimpleQueue +import six + from rethinkdb import errors, query, utils_common from rethinkdb.logger import default_logger @@ -259,12 +261,16 @@ def export_table(db, table, directory, options, error_queue, progress_info, sind with sindex_counter.get_lock(): sindex_counter.value += len(table_info["indexes"]) # -- start the writer - ctx = mp.get_context(mp.get_start_method()) - task_queue = SimpleQueue(ctx=ctx) + if six.PY3: + ctx = multiprocessing.get_context(multiprocessing.get_start_method()) + task_queue = SimpleQueue(ctx=ctx) + else: + task_queue = SimpleQueue() + writer = None if options.format == "json": filename = directory + "/%s/%s.json" % (db, table) - writer = mp.Process( + writer = multiprocessing.Process( target=json_writer, args=( filename, @@ -274,7 +280,7 @@ def export_table(db, table, directory, options, error_queue, progress_info, sind options.format)) elif options.format == "csv": filename = directory + "/%s/%s.csv" % (db, table) - writer = mp.Process( + writer = multiprocessing.Process( target=csv_writer, args=( filename, @@ -284,7 +290,7 @@ def export_table(db, 
table, directory, options, error_queue, progress_info, sind error_queue)) elif options.format == "ndjson": filename = directory + "/%s/%s.ndjson" % (db, table) - writer = mp.Process( + writer = multiprocessing.Process( target=json_writer, args=( filename, @@ -389,13 +395,16 @@ def update_progress(progress_info, options): def run_clients(options, workingDir, db_table_set): # Spawn one client for each db.table, up to options.clients at a time - exit_event = mp.Event() + exit_event = multiprocessing.Event() processes = [] - ctx = mp.get_context(mp.get_start_method()) - error_queue = SimpleQueue(ctx=ctx) - interrupt_event = mp.Event() - sindex_counter = mp.Value(ctypes.c_longlong, 0) - hook_counter = mp.Value(ctypes.c_longlong, 0) + if six.PY3: + ctx = multiprocessing.get_context(multiprocessing.get_start_method()) + error_queue = SimpleQueue(ctx=ctx) + else: + error_queue = SimpleQueue() + interrupt_event = multiprocessing.Event() + sindex_counter = multiprocessing.Value(ctypes.c_longlong, 0) + hook_counter = multiprocessing.Value(ctypes.c_longlong, 0) signal.signal(signal.SIGINT, lambda a, b: abort_export(a, b, exit_event, interrupt_event)) errors = [] @@ -407,8 +416,8 @@ def run_clients(options, workingDir, db_table_set): tableSize = int(options.retryQuery("count", query.db(db).table(table).info()['doc_count_estimates'].sum())) - progress_info.append((mp.Value(ctypes.c_longlong, 0), - mp.Value(ctypes.c_longlong, tableSize))) + progress_info.append((multiprocessing.Value(ctypes.c_longlong, 0), + multiprocessing.Value(ctypes.c_longlong, tableSize))) arg_lists.append((db, table, workingDir, options, @@ -430,9 +439,9 @@ def run_clients(options, workingDir, db_table_set): processes = [process for process in processes if process.is_alive()] if len(processes) < options.clients and len(arg_lists) > 0: - newProcess = mp.Process(target=export_table, args=arg_lists.pop(0)) - newProcess.start() - processes.append(newProcess) + new_process = 
multiprocessing.Process(target=export_table, args=arg_lists.pop(0)) + new_process.start() + processes.append(new_process) update_progress(progress_info, options) diff --git a/rethinkdb/_import.py b/rethinkdb/_import.py index b118087d..032c57c2 100755 --- a/rethinkdb/_import.py +++ b/rethinkdb/_import.py @@ -26,13 +26,14 @@ import csv import ctypes import json -import multiprocessing as mp +import multiprocessing import optparse import os import signal import sys import time import traceback +import six from multiprocessing.queues import Queue, SimpleQueue from rethinkdb import ast, errors, query, utils_common @@ -110,12 +111,12 @@ def __init__( self.query_runner = query_runner # reporting information - self._bytes_size = mp.Value(ctypes.c_longlong, -1) - self._bytes_read = mp.Value(ctypes.c_longlong, -1) + self._bytes_size = multiprocessing.Value(ctypes.c_longlong, -1) + self._bytes_read = multiprocessing.Value(ctypes.c_longlong, -1) - self._total_rows = mp.Value(ctypes.c_longlong, -1) - self._rows_read = mp.Value(ctypes.c_longlong, 0) - self._rows_written = mp.Value(ctypes.c_longlong, 0) + self._total_rows = multiprocessing.Value(ctypes.c_longlong, -1) + self._rows_read = multiprocessing.Value(ctypes.c_longlong, 0) + self._rows_written = multiprocessing.Value(ctypes.c_longlong, 0) # source if hasattr(source, 'read'): @@ -1083,15 +1084,21 @@ def import_tables(options, sources, files_ignored=None): tables = dict(((x.db, x.table), x) for x in sources) # (db, table) => table - ctx = mp.get_context(mp.get_start_method()) + if six.PY3: + ctx = multiprocessing.get_context(multiprocessing.get_start_method()) + error_queue = SimpleQueue(ctx=ctx) + warning_queue = SimpleQueue(ctx=ctx) + timing_queue = SimpleQueue(ctx=ctx) + else: + error_queue = SimpleQueue() + warning_queue = SimpleQueue() + timing_queue = SimpleQueue() + max_queue_size = options.clients * 3 - work_queue = mp.Manager().Queue(max_queue_size) - error_queue = SimpleQueue(ctx=ctx) - warning_queue = 
SimpleQueue(ctx=ctx) - exit_event = mp.Event() - interrupt_event = mp.Event() + work_queue = multiprocessing.Manager().Queue(max_queue_size) - timing_queue = SimpleQueue(ctx=ctx) + exit_event = multiprocessing.Event() + interrupt_event = multiprocessing.Event() errors = [] warnings = [] @@ -1168,7 +1175,7 @@ def drain_queues(): try: # - start the progress bar if not options.quiet: - progress_bar = mp.Process( + progress_bar = multiprocessing.Process( target=update_progress, name="progress bar", args=(sources, options.debug, exit_event, progress_bar_sleep) @@ -1180,7 +1187,7 @@ def drain_queues(): writers = [] pools.append(writers) for i in range(options.clients): - writer = mp.Process( + writer = multiprocessing.Process( target=table_writer, name="table writer %d" % i, @@ -1204,7 +1211,7 @@ def drain_queues(): # add a workers to fill up the readers pool while len(readers) < options.clients: table = next(file_iter) - reader = mp.Process( + reader = multiprocessing.Process( target=table.read_to_queue, name="table reader %s.%s" % (table.db, diff --git a/rethinkdb/utils_common.py b/rethinkdb/utils_common.py index 8d3c9ac1..823e2fa9 100644 --- a/rethinkdb/utils_common.py +++ b/rethinkdb/utils_common.py @@ -129,7 +129,7 @@ def check_minimum_version(options, minimum_version='1.6'): version_string = options.retryQuery('get server version', query.db( 'rethinkdb').table('server_status')[0]['process']['version']) - matches = re.match(r'rethinkdb (?P<version>(\d+)\.(\d+)\.(\d+)).*', version_string) + matches = re.match(r'(rethinkdb|rebirthdb) (?P<version>(\d+)\.(\d+)\.(\d+)).*', version_string) if not matches: raise RuntimeError("invalid version string format: %s" % version_string) @@ -285,11 +285,11 @@ def take_action(self, action, dest, opt, value, values, parser): '--connect', dest='driver_port', metavar='HOST:PORT', - help='host and client port of a rethinkdb node to connect (default: localhost:%d)' % - net.DEFAULT_PORT, + help='host and client port of a rethinkdb node to connect (default: 
localhost:%d)' % net.DEFAULT_PORT, action='callback', callback=combined_connect_action, - type='string') + type='string' + ) connection_group.add_option( '--driver-port', dest='driver_port', diff --git a/setup.py b/setup.py index e9d5894c..22f99249 100644 --- a/setup.py +++ b/setup.py @@ -44,10 +44,11 @@ if MATCH.group("post"): VERSION += "." + MATCH.group("post") - with open("rethinkdb/version.py", "w") as ostream: - print("# Autogenerated version", file=ostream) - print(file=ostream) - print("VERSION", "=", repr(VERSION), file=ostream) + with open("rethinkdb/version.py", "w") as f: + f.writelines([ + "# Autogenerated version\n", + "VERSION = {0}\n".format(repr(VERSION)) + ]) else: raise RuntimeError("{!r} does not match version format {!r}".format( RETHINKDB_VERSION_DESCRIBE, VERSION_RE))