
Python3.11 compatibility #284

Closed
wants to merge 8 commits
13 changes: 7 additions & 6 deletions rethinkdb/_dump.py
@@ -172,13 +172,14 @@ def main(argv=None, prog=None):
options = parse_options(argv or sys.argv[1:], prog=prog)
try:
if not options.quiet:
pass #ssmith, remove options
# Print a warning about the capabilities of dump, so no one is confused (hopefully)
print(
"""\
NOTE: 'rethinkdb-dump' saves data, secondary indexes, and write hooks, but does *not* save
cluster metadata. You will need to recreate your cluster setup yourself after
you run 'rethinkdb-restore'."""
)
# print(
# """\
# NOTE: 'rethinkdb-dump' saves data, secondary indexes, and write hooks, but does *not* save
# cluster metadata. You will need to recreate your cluster setup yourself after
# you run 'rethinkdb-restore'."""
# )

try:
start_time = time.time()
17 changes: 9 additions & 8 deletions rethinkdb/_export.py
@@ -28,7 +28,7 @@
import optparse
import os
import platform
import signal
# import signal
import sys
import tempfile
import time
@@ -273,9 +273,9 @@ def export_table(
hook_counter,
exit_event,
):
signal.signal(
signal.SIGINT, signal.SIG_DFL
) # prevent signal handlers from being set in child processes
# signal.signal(
# signal.SIGINT, signal.SIG_DFL
# ) # prevent signal handlers from being set in child processes

writer = None

@@ -470,9 +470,9 @@ def run_clients(options, workingDir, db_table_set):
sindex_counter = multiprocessing.Value(ctypes.c_longlong, 0)
hook_counter = multiprocessing.Value(ctypes.c_longlong, 0)

signal.signal(
signal.SIGINT, lambda a, b: abort_export(a, b, exit_event, interrupt_event)
)
# signal.signal(
# signal.SIGINT, lambda a, b: abort_export(a, b, exit_event, interrupt_event)
# )
errors = []

try:
@@ -552,7 +552,8 @@ def plural(num, text, plural_text):
)
)
finally:
signal.signal(signal.SIGINT, signal.SIG_DFL)
pass
# signal.signal(signal.SIGINT, signal.SIG_DFL)

if interrupt_event.is_set():
raise RuntimeError("Interrupted")
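
Note on the `_export.py` hunks above: they comment out every `signal.signal(...)` call, both the SIG_DFL reset in the `export_table` worker and the SIGINT handler that `run_clients` installed to trigger `abort_export`, plus the SIG_DFL restore in the `finally` block. If the intent is only to avoid `signal.signal()` being called somewhere it is not allowed (CPython raises ValueError outside the main thread of the main interpreter), a guarded registration would preserve the original Ctrl-C behaviour. A minimal sketch, not part of this PR; the helper name is made up:

```python
# Sketch only, not from this repository: register a SIGINT handler when the
# call is legal (main thread of the main interpreter) and silently skip it
# otherwise, instead of removing the registration altogether.
import signal
import threading


def install_sigint_handler(handler):
    """Hypothetical helper: best-effort SIGINT registration."""
    if threading.current_thread() is threading.main_thread():
        signal.signal(signal.SIGINT, handler)


# Example: the parent process keeps its abort-on-Ctrl-C behaviour.
install_sigint_handler(lambda signum, frame: print("aborting export"))
```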
19 changes: 10 additions & 9 deletions rethinkdb/_import.py
@@ -29,7 +29,7 @@
import multiprocessing
import optparse
import os
import signal
# import signal
import sys
import time
import traceback
@@ -441,10 +441,10 @@ def read_to_queue(
ignore_signals=True,
batch_size=None,
):
if (
ignore_signals
): # ToDo: work out when we are in a worker process automatically
signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should ignore these
# if (
# ignore_signals
# ): # ToDo: work out when we are in a worker process automatically
# signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should ignore these

if batch_size is None:
batch_size = utils_common.default_batch_size
@@ -1078,7 +1078,7 @@ def parse_options(argv, prog=None):
def table_writer(
tables, options, work_queue, error_queue, warning_queue, exit_event, timing_queue
):
signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should ignore these
# signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should ignore these
db = table = batch = None

try:
@@ -1188,7 +1188,7 @@ def table_writer(


def update_progress(tables, debug, exit_event, sleep=0.2):
signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should not get these
# signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should not get these

# give weights to each of the tables based on file size
totalSize = sum([x.bytes_size for x in tables])
@@ -1269,7 +1269,7 @@ def import_tables(options, sources, files_ignored=None):
progress_bar_sleep = 0.2

# - setup KeyboardInterupt handler
signal.signal(signal.SIGINT, lambda a, b: utils_common.abort(pools, exit_event))
# signal.signal(signal.SIGINT, lambda a, b: utils_common.abort(pools, exit_event))

# - queue draining
def drain_queues():
@@ -1494,7 +1494,8 @@ def plural(num, text):
for key, value in sorted(timing_sums.items(), key=lambda x: x[0]):
print(" %s: %.2f" % (key, value))
finally:
signal.signal(signal.SIGINT, signal.SIG_DFL)
pass
# signal.signal(signal.SIGINT, signal.SIG_DFL)

drain_queues()
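
The `_import.py` hunks make the matching change in the worker entry points (`read_to_queue`, `table_writer`, `update_progress`), where the original code set `SIG_IGN` so that only the parent process reacted to Ctrl-C, and in `import_tables`, which installed the `utils_common.abort` handler. If that division of labour is still wanted without touching each worker function, ignoring SIGINT once in a pool initializer is one alternative. A minimal sketch under that assumption; the names below are illustrative, not from this codebase:

```python
# Sketch only: ignore SIGINT in every worker via a Pool initializer so that
# only the parent process handles Ctrl-C.
import multiprocessing
import signal


def _ignore_sigint():
    # Runs once per worker process, in that process's main thread,
    # so calling signal.signal() here is allowed.
    signal.signal(signal.SIGINT, signal.SIG_IGN)


if __name__ == "__main__":
    with multiprocessing.Pool(processes=4, initializer=_ignore_sigint) as pool:
        print(pool.map(abs, [-1, -2, -3]))  # -> [1, 2, 3]; workers ignore SIGINT
```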

31 changes: 15 additions & 16 deletions rethinkdb/ast.py
@@ -20,21 +20,13 @@

import base64
import binascii
import collections
import datetime
import json
import sys
import threading

from rethinkdb import ql2_pb2
from rethinkdb.errors import (QueryPrinter, ReqlDriverCompileError,
ReqlDriverError, T)

if sys.version_info < (3, 3):
# python < 3.3 uses collections
import collections
else:
# but collections is deprecated from python >= 3.3
import collections.abc as collections
from rethinkdb.errors import QueryPrinter, ReqlDriverCompileError, ReqlDriverError, T

P_TERM = ql2_pb2.Term.TermType

@@ -48,6 +40,13 @@
except NameError:
xrange = range

try:
collections.abc.Callable
except AttributeError:
collections.abc.Callable = collections.Callable
collections.abc.Mapping = collections.Mapping
collections.abc.Iterable = collections.Iterable


def dict_items(dictionary):
return list(dictionary.items())
@@ -82,7 +81,7 @@ def clear(cls):

def expr(val, nesting_depth=20):
"""
Convert a Python primitive into a RQL primitive value
Convert a Python primitive into a RQL primitive value
"""
if not isinstance(nesting_depth, int):
raise ReqlDriverCompileError("Second argument to `r.expr` must be a number.")
@@ -92,7 +91,7 @@ def expr(val, nesting_depth=20):

if isinstance(val, RqlQuery):
return val
elif isinstance(val, collections.Callable):
elif isinstance(val, collections.abc.Callable):
return Func(val)
elif isinstance(val, (datetime.datetime, datetime.date)):
if not hasattr(val, "tzinfo") or not val.tzinfo:
@@ -113,14 +112,14 @@
return Datum(val)
elif isinstance(val, bytes):
return Binary(val)
elif isinstance(val, collections.Mapping):
elif isinstance(val, collections.abc.Mapping):
# MakeObj doesn't take the dict as a keyword args to avoid
# conflicting with the `self` parameter.
obj = {}
for k, v in dict_items(val):
obj[k] = expr(v, nesting_depth - 1)
return MakeObj(obj)
elif isinstance(val, collections.Iterable):
elif isinstance(val, collections.abc.Iterable):
val = [expr(v, nesting_depth - 1) for v in val]
return MakeArray(*val)
else:
@@ -767,7 +766,7 @@ def recursively_make_hashable(obj):

class ReQLEncoder(json.JSONEncoder):
"""
Default JSONEncoder subclass to handle query conversion.
Default JSONEncoder subclass to handle query conversion.
"""

def __init__(self):
@@ -787,7 +786,7 @@ def default(self, obj):

class ReQLDecoder(json.JSONDecoder):
"""
Default JSONDecoder subclass to handle pseudo-type conversion.
Default JSONDecoder subclass to handle pseudo-type conversion.
"""

def __init__(self, reql_format_opts=None):
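
The `ast.py` changes drop the `sys.version_info` switch in favour of a plain `import collections`, point the `isinstance` checks in `expr()` at `collections.abc.Callable` / `Mapping` / `Iterable`, and add a fallback that aliases those names into `collections.abc` if `collections.abc.Callable` is missing. The background is that the ABC aliases that used to live directly on `collections` were deprecated since Python 3.3 and removed in 3.10, so `collections.Callable` no longer exists on 3.10/3.11. A minimal standalone sketch of the same check pattern (not the driver's code):

```python
# Sketch only: the kind of dispatch expr() performs, written against
# collections.abc so it works on Python 3.10/3.11, where the old top-level
# aliases (collections.Callable, collections.Mapping, ...) are gone.
import collections.abc


def classify(val):
    if isinstance(val, collections.abc.Callable):
        return "callable"
    if isinstance(val, collections.abc.Mapping):
        return "mapping"      # checked before Iterable, since mappings iterate too
    if isinstance(val, collections.abc.Iterable):
        return "iterable"
    return "scalar"


print(classify(len))        # callable
print(classify({"a": 1}))   # mapping
print(classify([1, 2, 3]))  # iterable
print(classify(42))         # scalar
```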