Thanks to visit codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
5c69d38
Make ThreadPoolExecutor extensible.
ericsnowcurrently Sep 25, 2024
01789be
Add InterpreterPoolExecutor.
ericsnowcurrently Sep 25, 2024
6def4be
Clean up the interpreter if initialize() fails.
ericsnowcurrently Sep 27, 2024
84993a5
Add a missing import.
ericsnowcurrently Sep 27, 2024
c540cf0
Fix some typos.
ericsnowcurrently Sep 27, 2024
45d584d
Add more tests.
ericsnowcurrently Sep 27, 2024
c90c016
Add docs.
ericsnowcurrently Sep 27, 2024
1cb4657
Add a NEWS entry.
ericsnowcurrently Sep 27, 2024
4dc0989
Fix the last test.
ericsnowcurrently Sep 27, 2024
57b2db6
Add more tests.
ericsnowcurrently Sep 27, 2024
75e11d2
Simplify ExecutionFailed.
ericsnowcurrently Sep 30, 2024
69c2b8e
Fix the signature of resolve_task().
ericsnowcurrently Sep 30, 2024
f03c314
Capture any uncaught exception.
ericsnowcurrently Sep 30, 2024
4806d9f
Add TODO comments.
ericsnowcurrently Sep 30, 2024
efc0395
Docs fixes.
ericsnowcurrently Sep 30, 2024
a29aee3
Automatically apply textwrap.dedent() to scripts.
ericsnowcurrently Sep 30, 2024
8bab457
Fix the WASI build.
ericsnowcurrently Sep 30, 2024
cd29914
wasi
ericsnowcurrently Oct 1, 2024
0287f3b
Ignore race in test.
ericsnowcurrently Oct 1, 2024
80cd7b1
Add BrokenInterpreterPool.
ericsnowcurrently Oct 8, 2024
f8d4273
Tweak the docs.
ericsnowcurrently Oct 8, 2024
3a8bfce
Clarify the InterpreterPoolExecutor docs.
ericsnowcurrently Oct 8, 2024
af6c27a
Catch all exceptions.
ericsnowcurrently Oct 8, 2024
8c0a405
Factor out exception serialization helpers.
ericsnowcurrently Oct 8, 2024
1ae7ca2
Set the ExecutionFailed error as __cause__.
ericsnowcurrently Oct 8, 2024
d24e85d
Drop the exception serialization helpers.
ericsnowcurrently Oct 8, 2024
05a03ad
Always finalize if there is an error in initialize().
ericsnowcurrently Oct 8, 2024
f150931
Explicitly note the problem with functions defined in __main__.
ericsnowcurrently Oct 8, 2024
97d0292
Handle the case where interpreters.queues doesn't exist.
ericsnowcurrently Oct 8, 2024
baf0504
Merge branch 'main' into interpreter-pool-executor
ericsnowcurrently Oct 15, 2024
5c3a327
Add a What's New entry about InterpreterPoolExecutor.
ericsnowcurrently Oct 15, 2024
a2032a8
Fix a typo.
ericsnowcurrently Oct 15, 2024
54119b8
Fix the documented signature.
ericsnowcurrently Oct 15, 2024
744dca7
Test and document asyncio support.
ericsnowcurrently Oct 15, 2024
f61d62d
Apply suggestions from code review
ericsnowcurrently Oct 16, 2024
ee65bb2
Expand the docs.
ericsnowcurrently Oct 16, 2024
a7f5c50
For now, drop support for scripts.
ericsnowcurrently Oct 16, 2024
b148e09
Fix a TODO comment.
ericsnowcurrently Oct 16, 2024
e365ae7
Fix the docs.
ericsnowcurrently Oct 16, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Add InterpreterPoolExecutor.
  • Loading branch information
ericsnowcurrently committed Sep 27, 2024
commit 01789be00f372c65b0519eab1580faa5491e0e2e
6 changes: 6 additions & 0 deletions Lib/concurrent/futures/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
'Executor',
'wait',
'as_completed',
'InterpreterPoolExecutor',
'ProcessPoolExecutor',
'ThreadPoolExecutor',
)
Expand All @@ -51,4 +52,9 @@ def __getattr__(name):
ThreadPoolExecutor = te
return te

if name == 'InterpreterPoolExecutor':
from .interpreter import InterpreterPoolExecutor as ie
InterpreterPoolExecutor = ie
return ie

raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
183 changes: 183 additions & 0 deletions Lib/concurrent/futures/interpreter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
"""Implements InterpreterPoolExecutor."""

import pickle
from . import thread as _thread
import _interpreters
import _interpqueues


# A single newline, written as a triple-quoted literal; used to join
# script fragments in WorkerContext.initialize().
LINESEP = '''
'''


# Template used by ExecutionFailed.__str__ to append the interpreter's
# formatted traceback ({formatted}) to the base message ({superstr}).
_EXEC_FAILURE_STR = """
{superstr}

Uncaught in the interpreter:

{formatted}
""".strip()


class ExecutionFailed(_interpreters.InterpreterError):
    """An unhandled exception happened during execution."""

    def __init__(self, excinfo):
        # Prefer the pre-formatted message; otherwise synthesize one
        # from the exception type and/or message, whichever is set.
        message = excinfo.formatted
        if not message:
            if excinfo.type and excinfo.msg:
                message = f'{excinfo.type.__name__}: {excinfo.msg}'
            else:
                message = excinfo.type.__name__ or excinfo.msg
        super().__init__(message)
        self.excinfo = excinfo

    def __str__(self):
        # Fall back to the plain message if the traceback display is
        # unavailable for any reason.
        try:
            errdisplay = self.excinfo.errdisplay
        except Exception:
            return super().__str__()
        return _EXEC_FAILURE_STR.format(
            superstr=super().__str__(),
            formatted=errdisplay,
        )


# Passed as the "unbound items" mode to _interpqueues.create()/put();
# presumably 2 means "raise an error" — unbound result items should
# never happen here (TODO confirm against _interpqueues docs).
UNBOUND = 2  # error; this should not happen.


class WorkerContext(_thread.WorkerContext):
    """Per-worker state for InterpreterPoolExecutor.

    Each worker thread owns one context, wrapping a dedicated
    subinterpreter plus a cross-interpreter queue used to send
    function results back to this interpreter.
    """

    @classmethod
    def prepare(cls, initializer, initargs, shared):
        """Return (create_context, resolve_task) for the executor.

        ``initializer`` may be a script (str), which takes no args,
        or a picklable callable run with ``initargs``.
        """
        if isinstance(initializer, str):
            if initargs:
                # Fixed: the message referenced an undefined name ("args").
                raise ValueError(f'an initializer script does not take args, got {initargs!r}')
            initscript = initializer
            # Make sure the script compiles.
            # Fixed: compile() was called on an undefined name ("script").
            # XXX Keep the compiled code object?
            compile(initscript, '<string>', 'exec')
        elif initializer is not None:
            # Bundle the callable with its args so the subinterpreter
            # can unpickle and run them during initialize().
            pickled = pickle.dumps((initializer, initargs))
            initscript = f'''if True:
                initializer, initargs = pickle.loads({pickled!r})
                initializer(*initargs)
                '''
        else:
            initscript = None

        def create_context():
            # One fresh context (and thus one interpreter) per worker.
            return cls(initscript, shared)

        def resolve_task(fn, args, kwargs):
            # Normalize a submitted task into (data, kind).  Fixed: the
            # signature had a stray leading "cls" parameter, but callers
            # invoke this with exactly (fn, args, kwargs).
            if isinstance(fn, str):
                if args or kwargs:
                    raise ValueError(f'a script does not take args or kwargs, got {args!r} and {kwargs!r}')
                data = fn
                kind = 'script'
            else:
                data = pickle.dumps((fn, args, kwargs))
                kind = 'function'
            return (data, kind)

        return create_context, resolve_task

    @classmethod
    def _run_pickled_func(cls, data, resultsid):
        """Run a pickled (fn, args, kwargs) in this interpreter and
        send the result back on the results queue ``resultsid``."""
        fn, args, kwargs = pickle.loads(data)
        res = fn(*args, **kwargs)
        # Send the result back.  If it isn't directly shareable between
        # interpreters, fall back to sending it pickled (flag 1).
        try:
            _interpqueues.put(resultsid, res, 0, UNBOUND)
        except _interpreters.NotShareableError:
            res = pickle.dumps(res)
            _interpqueues.put(resultsid, res, 1, UNBOUND)

    def __init__(self, initscript, shared=None):
        # initscript: source executed in the interpreter by initialize().
        self.initscript = initscript or ''
        # shared: mapping of shareable objects to set on the
        # interpreter's __main__ module; copied defensively.
        self.shared = dict(shared) if shared else None
        self.interpid = None
        self.resultsid = None

    def __del__(self):
        # Best-effort cleanup if the owner never called finalize().
        if self.interpid is not None:
            self.finalize()

    def _exec(self, script):
        """Run ``script`` in the worker's interpreter.

        Raises ExecutionFailed if an exception goes uncaught there.
        """
        assert self.interpid is not None
        excinfo = _interpreters.exec(self.interpid, script, restrict=True)
        if excinfo is not None:
            raise ExecutionFailed(excinfo)

    def initialize(self):
        """Create the interpreter and results queue and run initscript."""
        assert self.interpid is None, self.interpid
        self.interpid = _interpreters.create(reqrefs=True)
        try:
            # This may raise InterpreterNotFoundError:
            _interpreters.incref(self.interpid)

            maxsize = 0
            fmt = 0
            self.resultsid = _interpqueues.create(maxsize, fmt, UNBOUND)

            initscript = f"""if True:
                from {__name__} import WorkerContext
                """
            initscript += LINESEP + self.initscript
            self._exec(initscript)
            if self.shared:
                _interpreters.set___main___attrs(
                        self.interpid, self.shared, restrict=True)
        except BaseException:
            # Don't leak the interpreter (or queue) if setup fails.
            self.finalize()
            raise

    def finalize(self):
        """Destroy the results queue and release the interpreter.

        Clears the ids first so __del__ cannot re-enter; tolerates a
        partially initialized context (e.g. queue never created).
        """
        interpid = self.interpid
        resultsid = self.resultsid
        self.resultsid = None
        self.interpid = None
        if resultsid is not None:
            try:
                _interpqueues.destroy(resultsid)
            except _interpqueues.QueueNotFoundError:
                pass
        if interpid is not None:
            try:
                _interpreters.decref(interpid)
            except _interpreters.InterpreterNotFoundError:
                pass

    def run(self, task):
        """Run one resolved (data, kind) task and return its result.

        Scripts return None; functions return their (possibly pickled
        and round-tripped) return value.
        """
        data, kind = task
        if kind == 'script':
            # Fixed: previously executed an undefined name ("script").
            self._exec(data)
            return None
        elif kind == 'function':
            # !r keeps the embedded bytes literal explicit (and avoids
            # BytesWarning under -b); for bytes str() and repr() match.
            self._exec(
                f'WorkerContext._run_pickled_func({data!r}, {self.resultsid})')
            obj, pickled, unboundop = _interpqueues.get(self.resultsid)
            assert unboundop is None, unboundop
            return pickle.loads(obj) if pickled else obj
        else:
            raise NotImplementedError(kind)


class InterpreterPoolExecutor(_thread.ThreadPoolExecutor):
    """A ThreadPoolExecutor whose workers each run a subinterpreter."""

    @classmethod
    def prepare_context(cls, initializer, initargs, shared):
        # Delegate worker-context setup to the interpreter-aware context.
        return WorkerContext.prepare(initializer, initargs, shared)

    def __init__(self, max_workers=None, thread_name_prefix='',
                 initializer=None, initargs=(), shared=None):
        """Initializes a new InterpreterPoolExecutor instance.

        Args:
            max_workers: The maximum number of interpreters that can be
                used to execute the given calls.
            thread_name_prefix: An optional name prefix to give our threads.
            initializer: A callable or script used to initialize each
                worker interpreter.
            initargs: A tuple of arguments to pass to the initializer.
            shared: A mapping of shareable objects to be inserted into
                each worker interpreter.
        """
        super().__init__(max_workers, thread_name_prefix,
                         initializer, initargs, shared=shared)
7 changes: 6 additions & 1 deletion Lib/test/test_concurrent_futures/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ def make_dummy_object(_):


class ExecutorTest:

    def assertTaskRaises(self, exctype):
        # Context manager asserting that a submitted task fails with
        # exctype.  Overridden by executors that wrap task exceptions
        # in another type (see InterpreterPoolExecutorTest).
        return self.assertRaises(exctype)

# Executor.shutdown() and context manager usage is tested by
# ExecutorShutdownTest.
def test_submit(self):
Expand Down Expand Up @@ -52,7 +56,8 @@ def test_map_exception(self):
i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
self.assertEqual(i.__next__(), (0, 1))
self.assertEqual(i.__next__(), (0, 1))
self.assertRaises(ZeroDivisionError, i.__next__)
with self.assertTaskRaises(ZeroDivisionError):
i.__next__()

@support.requires_resource('walltime')
def test_map_timeout(self):
Expand Down
70 changes: 70 additions & 0 deletions Lib/test/test_concurrent_futures/test_interpreter_pool.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import unittest
from concurrent.futures.interpreter import ExecutionFailed
from test import support
from test.support.interpreters import queues

from .executor import ExecutorTest, mul
from .util import BaseTestCase, InterpreterPoolMixin, setup_module


class InterpreterPoolExecutorTest(InterpreterPoolMixin, ExecutorTest, BaseTestCase):
    """Runs the shared executor tests against InterpreterPoolExecutor."""

    def assertTaskRaises(self, exctype):
        # Task failures surface as ExecutionFailed; match the original
        # exception by its type name in the message.
        return self.assertRaisesRegex(ExecutionFailed, exctype.__name__)

    def test_saturation(self):
        blocker = queues.create()
        executor = self.executor_type(4, shared=dict(blocker=blocker))

        ntasks = 15 * executor._max_workers
        for _ in range(ntasks):
            executor.submit('blocker.get()')
        self.assertEqual(len(executor._threads), executor._max_workers)
        for _ in range(ntasks):
            blocker.put_nowait(None)
        executor.shutdown(wait=True)

    @support.requires_gil_enabled("gh-117344: test is flaky without the GIL")
    def test_idle_thread_reuse(self):
        executor = self.executor_type()
        for a, b in ((21, 2), (6, 7), (3, 14)):
            executor.submit(mul, a, b).result()
        self.assertEqual(len(executor._threads), 1)
        executor.shutdown(wait=True)

    # def test_executor_map_current_future_cancel(self):
    #     blocker = queues.create()
    #     log = queues.create()
    #
    #     script = """if True:
    #         def log_n_wait({ident}):
    #             blocker(f"ident {ident} started")
    #             try:
    #                 stop_event.wait()
    #             finally:
    #                 log.append(f"ident {ident} stopped")
    #         """
    #
    #     with self.executor_type(max_workers=1) as pool:
    #         # submit work to saturate the pool
    #         fut = pool.submit(script.format(ident="first"))
    #         gen = pool.map(log_n_wait, ["second", "third"], timeout=0)
    #         try:
    #             with self.assertRaises(TimeoutError):
    #                 next(gen)
    #         finally:
    #             gen.close()
    #         blocker.put
    #         stop_event.set()
    #         fut.result()
    #         # ident='second' is cancelled as a result of raising a TimeoutError
    #         # ident='third' is cancelled because it remained in the collection of futures
    #         self.assertListEqual(log, ["ident='first' started", "ident='first' stopped"])


def setUpModule():
    # Delegate to the package-wide setup helper shared by all
    # test_concurrent_futures modules.
    setup_module()


if __name__ == "__main__":
unittest.main()
5 changes: 5 additions & 0 deletions Lib/test/test_concurrent_futures/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,10 @@ class ThreadPoolMixin(ExecutorMixin):
executor_type = futures.ThreadPoolExecutor


class InterpreterPoolMixin(ExecutorMixin):
    # Parameterizes the shared executor tests to run against
    # InterpreterPoolExecutor.
    executor_type = futures.InterpreterPoolExecutor


class ProcessPoolForkMixin(ExecutorMixin):
executor_type = futures.ProcessPoolExecutor
ctx = "fork"
Expand Down Expand Up @@ -120,6 +124,7 @@ def get_context(self):

def create_executor_tests(remote_globals, mixin, bases=(BaseTestCase,),
executor_mixins=(ThreadPoolMixin,
InterpreterPoolMixin,
ProcessPoolForkMixin,
ProcessPoolForkserverMixin,
ProcessPoolSpawnMixin)):
Expand Down