pypy3.9: pickle.PicklingError in asgiref
This one fails in asgiref. It is definitely a regression compared to PyPy3.8, but I'm not 100% sure what is actually wrong or whether the code is correct. Please let me know if I should file a bug against asgiref. A minimal reproducer without asgiref follows below the log.
$ tox -e pypy3-test
[...]
=================================== FAILURES ===================================
_____________________________ test_multiprocessing _____________________________
    @pytest.mark.asyncio
    async def test_multiprocessing():
        """
        Tests that a forked process can use async_to_sync without it looking for
        the event loop from the parent process.
        """
        test_queue = multiprocessing.Queue()
        async def async_process():
            test_queue.put(42)
        def sync_process():
            """Runs async_process synchronously"""
            async_to_sync(async_process)()
        def fork_first():
            """Forks process before running sync_process"""
            fork = multiprocessing.Process(target=sync_process)
            fork.start()
            fork.join(3)
            # Force cleanup in failed test case
            if fork.is_alive():
                fork.terminate()
            return test_queue.get(True, 1)
>       assert await sync_to_async(fork_first)() == 42
tests/test_sync.py:636:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
asgiref/sync.py:414: in __call__
    ret = await asyncio.wait_for(future, timeout=None)
/usr/lib/pypy3.9/asyncio/tasks.py:442: in wait_for
    return await fut
/usr/lib/pypy3.9/asyncio/futures.py:284: in __await__
    yield self # This tells Task to wait for completion.
/usr/lib/pypy3.9/asyncio/tasks.py:328: in __wakeup
    future.result()
/usr/lib/pypy3.9/asyncio/futures.py:201: in result
    raise self._exception
/usr/lib/pypy3.9/concurrent/futures/thread.py:58: in run
    result = self.fn(*self.args, **self.kwargs)
/usr/lib/pypy3.9/_functools.py:81: in __call__
    return self._func(*(self._args + fargs), **fkeywords)
asgiref/sync.py:455: in thread_handler
    return func(*args, **kwargs)
/usr/lib/pypy3.9/_contextvars.py:44: in run
    return callable(*args, **kwargs)
/usr/lib/pypy3.9/_functools.py:81: in __call__
    return self._func(*(self._args + fargs), **fkeywords)
tests/test_sync.py:629: in fork_first
    fork.start()
/usr/lib/pypy3.9/multiprocessing/process.py:121: in start
    self._popen = self._Popen(self)
/usr/lib/pypy3.9/multiprocessing/context.py:224: in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
/usr/lib/pypy3.9/multiprocessing/context.py:284: in _Popen
    return Popen(process_obj)
/usr/lib/pypy3.9/multiprocessing/popen_spawn_posix.py:32: in __init__
    super().__init__(process_obj)
/usr/lib/pypy3.9/multiprocessing/popen_fork.py:19: in __init__
    self._launch(process_obj)
/usr/lib/pypy3.9/multiprocessing/popen_spawn_posix.py:47: in _launch
    reduction.dump(process_obj, fp)
/usr/lib/pypy3.9/multiprocessing/reduction.py:60: in dump
    ForkingPickler(file, protocol).dump(obj)
/usr/lib/pypy3.9/pickle.py:500: in dump
    self.save(obj)
/usr/lib/pypy3.9/pickle.py:616: in save
    self.save_reduce(obj=obj, *rv)
/usr/lib/pypy3.9/pickle.py:730: in save_reduce
    save(state)
/usr/lib/pypy3.9/pickle.py:573: in save
    f(self, obj) # Call unbound method with explicit self
/usr/lib/pypy3.9/pickle.py:985: in save_dict
    self._batch_setitems(obj.items())
/usr/lib/pypy3.9/pickle.py:1011: in _batch_setitems
    save(v)
/usr/lib/pypy3.9/pickle.py:573: in save
    f(self, obj) # Call unbound method with explicit self
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <multiprocessing.reduction.ForkingPickler object at 0x00007ff8f90fcec8>
obj = <function test_multiprocessing.<locals>.sync_process at 0x00007ff8fa0265c0>
name = 'test_multiprocessing.<locals>.sync_process'
    def save_global(self, obj, name=None):
        write = self.write
        memo = self.memo
        if name is None:
            name = getattr(obj, '__qualname__', None)
        if name is None:
            name = obj.__name__
        module_name = whichmodule(obj, name)
        try:
            __import__(module_name, level=0)
            module = sys.modules[module_name]
            obj2, parent = _getattribute(module, name)
        except (ImportError, KeyError, AttributeError):
>           raise PicklingError(
                "Can't pickle %r: it's not found as %s.%s" %
                (obj, module_name, name)) from None
E           pickle.PicklingError: Can't pickle <function test_multiprocessing.<locals>.sync_process at 0x00007ff8fa0265c0>: it's not found as test_sync.test_multiprocessing.<locals>.sync_process
/usr/lib/pypy3.9/pickle.py:1084: PicklingError
------------------------------ Captured log call -------------------------------
ERROR asyncio:base_events.py:1738 Task was destroyed but it is pending!
task: <Task pending name='Task-29' coro=<StatelessServer.application_checker() running at /home/mgorny/asgiref/asgiref/server.py:136> wait_for=<Future pending cb=[Task.__wakeup()]>>
=========================== short test summary info ============================
FAILED tests/test_sync.py::test_multiprocessing - pickle.PicklingError: Can't...
=================== 1 failed, 61 passed, 1 xfailed in 14.29s ===================
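
For what it's worth, the traceback goes through popen_spawn_posix, i.e. the child process is started with the "spawn" start method, which has to pickle the Process target; sync_process is defined inside the test function, so the pickler cannot find it by its qualified name. Below is a minimal sketch of the same failure without asgiref (my own throwaway script, not part of any test suite); it forces the "spawn" start method explicitly, so it should fail the same way regardless of what the default context picks:

import multiprocessing


def main():
    def local_target():
        # Defined inside another function, so it has no importable
        # qualified name for the pickler to record.
        print("hello from the child")

    # "spawn" serializes the Process object, including its target,
    # with ForkingPickler before launching the child.
    ctx = multiprocessing.get_context("spawn")
    proc = ctx.Process(target=local_target)
    proc.start()  # fails here, in the parent, while pickling the target


if __name__ == "__main__":
    main()

On PyPy3.9 I would expect this to raise the same PicklingError as above; CPython rejects it as well when spawn is forced, just with a slightly different message from its C pickler. My guess is that the difference from PyPy3.8 is in which start method the default context ends up using (fork does not pickle the target at all), but I have not verified that.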