Merge pull request #220 from goodboy/ctx_debugger
Ctx debugger
goodboy authored Aug 1, 2021
2 parents 54d8c93 + 674fbbc commit 14379a0
Showing 9 changed files with 481 additions and 145 deletions.
54 changes: 54 additions & 0 deletions examples/debugging/fast_error_in_root_after_spawn.py
@@ -0,0 +1,54 @@
'''
Fast fail test with a context.
Ensure the partially initialized sub-actor process
doesn't cause a hang on error/cancel of the parent
nursery.
'''
import trio
import tractor


@tractor.context
async def sleep(
    ctx: tractor.Context,
):
    await trio.sleep(0.5)
    await ctx.started()
    await trio.sleep_forever()


async def open_ctx(
    n: tractor._trionics.ActorNursery
):

    # spawn the sleeper actor
    portal = await n.start_actor(
        name='sleeper',
        enable_modules=[__name__],
    )

    async with portal.open_context(
        sleep,
    ) as (ctx, first):
        assert first is None


async def main():

    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='runtime',
    ) as an:

        async with trio.open_nursery() as n:
            n.start_soon(open_ctx, an)

            await trio.sleep(0.2)
            await trio.sleep(0.1)
            assert 0


if __name__ == '__main__':
    trio.run(main)
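A note on the handshake used above: ``ctx.started()`` can also deliver a first value to the opener, which ``open_context`` yields as ``first``. A minimal sketch under that assumption (the ``ready`` and ``use_it`` names are hypothetical, not part of this diff):

@tractor.context
async def ready(
    ctx: tractor.Context,
):
    # the value passed to ``started()`` arrives as ``first`` on the
    # opener's side; the example above passes nothing, hence ``None``
    await ctx.started('hello')
    await trio.sleep_forever()


async def use_it(n):
    portal = await n.start_actor(
        name='greeter',
        enable_modules=[__name__],
    )
    async with portal.open_context(ready) as (ctx, first):
        assert first == 'hello'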
31 changes: 31 additions & 0 deletions examples/debugging/root_timeout_while_child_crashed.py
@@ -0,0 +1,31 @@

import trio
import tractor


async def key_error():
    "Raise a ``KeyError``"
    return {}['doggy']


async def main():
    """Root dies on a timeout while the child is crashed.
    """
    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='debug'
    ) as n:

        # spawn the child which crashes with a ``KeyError``
        portal = await n.run_in_actor(key_error)

        # XXX: this originally triggered a bug where the root would
        # enter the debugger and clobber the tty used by the repl even
        # though the child should have it locked.
        with trio.fail_after(1):
            await trio.Event().wait()


if __name__ == '__main__':
    trio.run(main)
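No pexpect test for this example appears in this diff; a hedged sketch of how one might be driven, assuming the ``spawn`` fixture used in ``tests/test_debugger.py`` below (the test name and the asserted output are assumptions):

import pexpect


def test_root_timeout_while_child_crashed(spawn):
    # boot the example script under the debugger-aware harness
    child = spawn('root_timeout_while_child_crashed')

    # the child should crash on the ``KeyError`` and enter the debugger
    child.expect(r"\(Pdb\+\+\)")
    before = str(child.before.decode())
    assert "KeyError" in before

    # resuming should let the root's ``fail_after`` fire and exit
    child.sendline('c')
    child.expect(pexpect.EOF)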
78 changes: 62 additions & 16 deletions tests/test_debugger.py
@@ -317,32 +317,58 @@ def test_multi_daemon_subactors(spawn, loglevel):
        next_msg = name_error_msg

    elif name_error_msg in before:
-        next_msg = None
+        next_msg = bp_forever_msg

    else:
        raise ValueError("Neither log msg was found !?")

    child.sendline('c')

    # NOTE: previously since we did not have clobber prevention
    # in the root actor this final resume could result in the debugger
    # tearing down since both child actors would be cancelled and it was
    # unlikely that `bp_forever` would re-acquire the tty lock again.
    # Now, we should have a final resumption in the root plus a possible
    # second entry by `bp_forever`.

    # first name_error failure
    child.sendline('c')
    child.expect(r"\(Pdb\+\+\)")
    before = str(child.before.decode())

-    if next_msg:
-        assert next_msg in before
+    assert next_msg in before

    child.sendline('c')

    # XXX: hooray the root clobbering the child here was fixed!
    # IMO, this demonstrates the true power of SC system design.

    child.expect(r"\(Pdb\+\+\)")
    before = str(child.before.decode())
    assert "tractor._exceptions.RemoteActorError: ('name_error'" in before
    # now the root actor won't clobber the bp_forever child
    # during its first access to the debug lock, but will instead
    # wait for the lock to release, via the edge triggered
    # ``_debug._no_remote_has_tty`` event, before sending cancel messages
    # (via portals) to its underlings B)

    # at some point here there should have been some warning msg from
    # the root announcing it avoided a clobber of the child's lock, but
    # it seems unreliable in testing here to grab it:
    # assert "in use by child ('bp_forever'," in before

    # wait for final error in root
    while True:

        child.sendline('c')
        child.expect(r"\(Pdb\+\+\)")
        before = str(child.before.decode())
        try:

            # root error should be packed as remote error
            assert "_exceptions.RemoteActorError: ('name_error'" in before
            break

        except AssertionError:
            assert bp_forever_msg in before

    try:
        child.sendline('c')
        child.expect(pexpect.EOF)

    except pexpect.exceptions.TIMEOUT:

        # Failed to exit using continue..?
        child.sendline('q')
        child.expect(pexpect.EOF)
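The comments above lean on an edge-triggered ``_debug._no_remote_has_tty`` event; its real implementation isn't part of this diff, but a minimal sketch of the idea (all names hypothetical) could pair a lock with an event that is set only while no child holds the tty:

import trio


class TtyLock:
    '''Sketch: a lock plus an edge-triggered "nobody holds the tty" event.'''

    def __init__(self) -> None:
        self._lock = trio.Lock()
        # set whenever no remote (child) actor holds the tty
        self.no_remote_has_tty = trio.Event()
        self.no_remote_has_tty.set()

    async def acquire(self) -> None:
        await self._lock.acquire()
        # a child now holds the tty: swap in a fresh, unset event so
        # the root blocks until release
        self.no_remote_has_tty = trio.Event()

    def release(self) -> None:
        self._lock.release()
        # edge trigger: wake the root, which may now safely cancel
        self.no_remote_has_tty.set()


async def root_cancel_path(lock: TtyLock, cancel_underlings) -> None:
    # rather than clobbering a child's repl, wait for the tty to be
    # free before sending cancel messages via portals
    await lock.no_remote_has_tty.wait()
    await cancel_underlings()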
@@ -386,19 +412,16 @@ def test_multi_subactors_root_errors(spawn):
def test_multi_nested_subactors_error_through_nurseries(spawn):
    """Verify deeply nested actors that error trigger debugger entries
    at each actor nursery (level) all the way up the tree.
-    """
-    # NOTE: previously, inside this script was a a bug where if the
+    """
+    # NOTE: previously, inside this script was a bug where if the
    # parent errors before a 2-levels-lower actor has released the lock,
    # the parent tries to cancel it but it's stuck in the debugger?
    # A test (below) has now been added to explicitly verify this is
    # fixed.

    child = spawn('multi_nested_subactors_error_up_through_nurseries')

-    # startup time can be iffy
-    time.sleep(1)

    for i in range(12):
        try:
            child.expect(r"\(Pdb\+\+\)")
@@ -471,11 +494,34 @@ def test_root_nursery_cancels_before_child_releases_tty_lock(

    child.sendline('c')

-    child.expect(pexpect.EOF)
+    while True:
+        try:
+            child.expect(pexpect.EOF)
+            break
+        except pexpect.exceptions.TIMEOUT:
+            print('child was able to grab tty lock again?')

    if not timed_out_early:

        before = str(child.before.decode())
        assert "tractor._exceptions.RemoteActorError: ('spawner0'" in before
        assert "tractor._exceptions.RemoteActorError: ('name_error'" in before
        assert "NameError: name 'doggypants' is not defined" in before


def test_root_cancels_child_context_during_startup(
    spawn,
):
    '''Verify that a fast failure in the root doesn't lock up child
    reaping while using the new context api.
    '''
    child = spawn('fast_error_in_root_after_spawn')

    child.expect(r"\(Pdb\+\+\)")

    before = str(child.before.decode())
    assert "AssertionError" in before

    child.sendline('c')
    child.expect(pexpect.EOF)
10 changes: 4 additions & 6 deletions tractor/_actor.py
@@ -41,10 +41,6 @@
log = get_logger('tractor')


-class ActorFailure(Exception):
-    "General actor failure"


async def _invoke(

    actor: 'Actor',
@@ -56,8 +52,10 @@ async def _invoke(
        Union[trio.CancelScope, BaseException]
    ] = trio.TASK_STATUS_IGNORED,
):
-    """Invoke local func and deliver result(s) over provided channel.
-    """
+    '''Invoke local func and deliver result(s) over provided channel.
+    '''
+    __tracebackhide__ = True
    treat_as_gen = False

    # possibly a traceback (not sure what the typing is for this..)
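The ``__tracebackhide__ = True`` added above relies on a pytest convention: frames whose local namespace sets that flag are hidden from assertion tracebacks. A tiny illustration (the ``check`` helper is hypothetical):

def check(value) -> None:
    # pytest omits this frame from failure tracebacks, so the error
    # points at the caller rather than inside ``check()``
    __tracebackhide__ = True
    assert value, 'reported at the call site'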