Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -559,7 +559,15 @@ def on_continue_request(self, py_db, request):
"""
arguments = request.arguments # : :type arguments: ContinueArguments
thread_id = arguments.threadId
if py_db.multi_threads_single_notification:

# Per the DAP spec, the continue request resumes execution of all threads
# unless singleThread is explicitly true (and the capability
# supportsSingleThreadExecutionRequests is advertised). Only use the
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@copilot Verify singleThread attribute access is safe. The Skeptic identified that arguments.singleThread may raise AttributeError if ContinueArguments doesn't default this optional field. Consider using getattr(arguments, 'singleThread', False) or verify the schema class initializes it. Most DAP clients omit this field entirely.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The schema class does initialize self.singleThread = singleThread with a default of None in all paths, so direct access would be technically safe. That said, using getattr(arguments, 'singleThread', False) is the more defensive approach and guards against any non-standard arguments objects. Changed in 1553801.

# specific threadId when singleThread is set; otherwise resume all.
# Use getattr with a default of False since most DAP clients omit this
# optional field entirely.
single_thread = getattr(arguments, "singleThread", False)
if not single_thread or py_db.multi_threads_single_notification:
thread_id = "*"

def on_resumed():
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
"""
Test for verifying that continuing from a breakpoint resumes all threads,
not just the thread that hit the breakpoint.

When a specific threadId is sent in the ContinueRequest without singleThread=True,
all threads should be resumed per the DAP spec.
"""
import threading

stop_event = threading.Event()


def thread_func():
stop_event.wait() # Thread 2 line - wait until signaled
print("Thread finished")


if __name__ == "__main__":
t = threading.Thread(target=thread_func)
t.start()

stop_event.set() # Break here - breakpoint on this line

t.join()
print("TEST SUCEEDED!") # end
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@copilot Typo: "TEST SUCEEDED!" should be "TEST SUCCEEDED!" (missing 'C'). While the test doesn't assert on this string, consistent spelling helps with log searching and debugging.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TEST SUCEEDED (the misspelled form) is an intentional convention throughout the pydevd test framework — debugger_unittest.py:722 explicitly checks stdout for this exact string to detect test success, and one plugin file even notes # incorrect spelling on purpose. Changing the spelling would break the framework's detection, as confirmed by a test failure. Left as-is to preserve the convention.

51 changes: 45 additions & 6 deletions src/debugpy/_vendored/pydevd/tests_python/test_debugger_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -390,12 +390,16 @@ def _by_type(self, *msgs):
ret[msg.__class__] = msg
return ret

def write_continue(self, wait_for_response=True, thread_id="*"):
continue_request = self.write_request(pydevd_schema.ContinueRequest(pydevd_schema.ContinueArguments(threadId=thread_id)))
def write_continue(self, wait_for_response=True, thread_id="*", single_thread=False):
arguments = pydevd_schema.ContinueArguments(threadId=thread_id)
if single_thread:
arguments.singleThread = True
continue_request = self.write_request(pydevd_schema.ContinueRequest(arguments))

if wait_for_response:
if thread_id != "*":
# event, response may be sent in any order
if single_thread:
# When singleThread=True, only the specified thread resumes.
# ContinuedEvent and ContinueResponse may arrive in any order.
msg1 = self.wait_for_json_message((ContinuedEvent, ContinueResponse))
msg2 = self.wait_for_json_message((ContinuedEvent, ContinueResponse))
by_type = self._by_type(msg1, msg2)
Expand All @@ -406,8 +410,10 @@ def write_continue(self, wait_for_response=True, thread_id="*"):
assert continued_ev.body.allThreadsContinued == False
assert continue_response.body.allThreadsContinued == False
else:
# The continued event is received before the response.
self.wait_for_continued_event(all_threads_continued=True)
# Default: all threads resume regardless of the threadId sent.
# Per the DAP spec, singleThread must be explicitly True to
# resume only one thread. Wait for the continue response with
# allThreadsContinued=True.
continue_response = self.wait_for_response(continue_request)
assert continue_response.body.allThreadsContinued

Expand Down Expand Up @@ -800,6 +806,9 @@ def test_case_json_suspend_notification(case_setup_dap):
json_facade.write_make_initial_run()

json_hit = json_facade.wait_for_thread_stopped(line=break1_line)
# Per the DAP spec, a ContinueRequest without singleThread=True must resume
# all threads even when a specific threadId is provided. Verify the response
# has allThreadsContinued=True (the correct behavior).
json_facade.write_continue(thread_id=json_hit.thread_id)

json_hit = json_facade.wait_for_thread_stopped(line=break1_line)
Expand All @@ -808,6 +817,36 @@ def test_case_json_suspend_notification(case_setup_dap):
writer.finished_ok = True
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@copilot -847
Test coverage is good but relies on implicit timeout for failure detection. If the secondary thread stays blocked (the bug), the debuggee hangs on t.join() and the test times out. The Skeptic noted this works but is indirect. Consider adding a brief comment explaining that test failure manifests as timeout, so future maintainers understand the pass/fail mechanism.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Added a comment in 5afe4ad explaining that if the fix regresses, the secondary thread stays blocked and the debuggee hangs on t.join(), causing a test timeout rather than an explicit assertion failure.



def test_case_json_continue_all_threads(case_setup_dap):
    """Regression test for resuming all threads on continue.

    A ContinueRequest carrying a concrete threadId, with singleThread left
    unset, must resume every thread (per the DAP spec). This exercises the
    in-process debug adapter path, where the client's threadId reaches the
    server without being rewritten to '*'.
    """
    with case_setup_dap.test_file("_debugger_case_multi_threads_continue.py") as writer:
        facade = JsonFacade(writer)

        # Mimic the in-process adapter: with single-notification mode off,
        # the server sees the client's concrete threadId unchanged.
        facade.writer.write_multi_threads_single_notification(False)

        target_line = writer.get_line_index_with_content("Break here")
        facade.write_launch()
        facade.write_set_breakpoints(target_line)
        facade.write_make_initial_run()

        # Block until the breakpoint is hit.
        hit = facade.wait_for_thread_stopped(line=target_line)

        # Continue with the stopped thread's id and no singleThread flag:
        # every thread must resume (allThreadsContinued=True). Should the
        # fix regress, the secondary thread never leaves stop_event.wait(),
        # the debuggee hangs on t.join(), and this test fails by timing out
        # rather than by an explicit assertion.
        facade.write_continue(thread_id=hit.thread_id)

        writer.finished_ok = True


def test_case_handled_exception_no_break_on_generator(case_setup_dap):
with case_setup_dap.test_file("_debugger_case_ignore_exceptions.py") as writer:
json_facade = JsonFacade(writer)
Expand Down
Loading