-
Notifications
You must be signed in to change notification settings - Fork 191
Fix: ContinueRequest with specific threadId resumes all threads (in-process adapter fix) #2012
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
Changes from all commits
79c9ab9
4061cd9
1553801
5afe4ad
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,25 @@ | ||
| """ | ||
| Test for verifying that continuing from a breakpoint resumes all threads, | ||
| not just the thread that hit the breakpoint. | ||
|
|
||
| When a specific threadId is sent in the ContinueRequest without singleThread=True, | ||
| all threads should be resumed per the DAP spec. | ||
| """ | ||
| import threading | ||
|
|
||
| stop_event = threading.Event() | ||
|
|
||
|
|
||
| def thread_func(): | ||
| stop_event.wait() # Thread 2 line - wait until signaled | ||
| print("Thread finished") | ||
|
|
||
|
|
||
| if __name__ == "__main__": | ||
| t = threading.Thread(target=thread_func) | ||
| t.start() | ||
|
|
||
| stop_event.set() # Break here - breakpoint on this line | ||
|
|
||
| t.join() | ||
| print("TEST SUCEEDED!") # end | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @copilot Typo: "TEST SUCEEDED!" should be "TEST SUCCEEDED!" (missing 'C'). While the test doesn't assert on this string, consistent spelling helps with log searching and debugging.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -390,12 +390,16 @@ def _by_type(self, *msgs): | |
| ret[msg.__class__] = msg | ||
| return ret | ||
|
|
||
| def write_continue(self, wait_for_response=True, thread_id="*"): | ||
| continue_request = self.write_request(pydevd_schema.ContinueRequest(pydevd_schema.ContinueArguments(threadId=thread_id))) | ||
| def write_continue(self, wait_for_response=True, thread_id="*", single_thread=False): | ||
| arguments = pydevd_schema.ContinueArguments(threadId=thread_id) | ||
| if single_thread: | ||
| arguments.singleThread = True | ||
| continue_request = self.write_request(pydevd_schema.ContinueRequest(arguments)) | ||
|
|
||
| if wait_for_response: | ||
| if thread_id != "*": | ||
| # event, response may be sent in any order | ||
| if single_thread: | ||
| # When singleThread=True, only the specified thread resumes. | ||
| # ContinuedEvent and ContinueResponse may arrive in any order. | ||
| msg1 = self.wait_for_json_message((ContinuedEvent, ContinueResponse)) | ||
| msg2 = self.wait_for_json_message((ContinuedEvent, ContinueResponse)) | ||
| by_type = self._by_type(msg1, msg2) | ||
|
|
@@ -406,8 +410,10 @@ def write_continue(self, wait_for_response=True, thread_id="*"): | |
| assert continued_ev.body.allThreadsContinued == False | ||
| assert continue_response.body.allThreadsContinued == False | ||
| else: | ||
| # The continued event is received before the response. | ||
| self.wait_for_continued_event(all_threads_continued=True) | ||
| # Default: all threads resume regardless of the threadId sent. | ||
| # Per the DAP spec, singleThread must be explicitly True to | ||
| # resume only one thread. Wait for the continue response with | ||
| # allThreadsContinued=True. | ||
| continue_response = self.wait_for_response(continue_request) | ||
| assert continue_response.body.allThreadsContinued | ||
|
|
||
|
|
@@ -800,6 +806,9 @@ def test_case_json_suspend_notification(case_setup_dap): | |
| json_facade.write_make_initial_run() | ||
|
|
||
| json_hit = json_facade.wait_for_thread_stopped(line=break1_line) | ||
| # Per the DAP spec, a ContinueRequest without singleThread=True must resume | ||
| # all threads even when a specific threadId is provided. Verify the response | ||
| # has allThreadsContinued=True (the correct behavior). | ||
| json_facade.write_continue(thread_id=json_hit.thread_id) | ||
|
|
||
| json_hit = json_facade.wait_for_thread_stopped(line=break1_line) | ||
|
|
@@ -808,6 +817,36 @@ def test_case_json_suspend_notification(case_setup_dap): | |
| writer.finished_ok = True | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @copilot -847
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Added a comment in 5afe4ad explaining that if the fix regresses, the secondary thread stays blocked on `stop_event.wait()` and the debuggee hangs on `t.join()`, causing a test timeout. |
||
|
|
||
|
|
||
| def test_case_json_continue_all_threads(case_setup_dap): | ||
| """Regression test: ContinueRequest with a specific threadId (no singleThread=True) | ||
| must resume ALL threads, not just the requested one. This tests the fix for the | ||
| in-process debug adapter scenario where the adapter does not transform threadId to '*'. | ||
| """ | ||
| with case_setup_dap.test_file("_debugger_case_multi_threads_continue.py") as writer: | ||
| json_facade = JsonFacade(writer) | ||
| # Simulate the in-process adapter: disable single notification mode. | ||
| # In this mode the server receives a specific threadId (not '*') directly | ||
| # from the client without the adapter transforming it. | ||
| json_facade.writer.write_multi_threads_single_notification(False) | ||
| break_line = writer.get_line_index_with_content("Break here") | ||
| json_facade.write_launch() | ||
| json_facade.write_set_breakpoints(break_line) | ||
| json_facade.write_make_initial_run() | ||
|
|
||
| # Wait for the breakpoint to be hit. | ||
| json_hit = json_facade.wait_for_thread_stopped(line=break_line) | ||
|
|
||
| # Send ContinueRequest with the specific thread's id (no singleThread=True). | ||
| # Per the DAP spec this must resume ALL threads, not just the specified one. | ||
| # The response must have allThreadsContinued=True. | ||
| # NOTE: If the fix regresses, the secondary thread stays blocked on | ||
| # stop_event.wait() and the debuggee hangs on t.join(), causing a test | ||
| # timeout rather than an explicit assertion failure. | ||
| json_facade.write_continue(thread_id=json_hit.thread_id) | ||
|
|
||
| writer.finished_ok = True | ||
|
|
||
|
|
||
| def test_case_handled_exception_no_break_on_generator(case_setup_dap): | ||
| with case_setup_dap.test_file("_debugger_case_ignore_exceptions.py") as writer: | ||
| json_facade = JsonFacade(writer) | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@copilot Verify singleThread attribute access is safe. The Skeptic identified that arguments.singleThread may raise AttributeError if ContinueArguments doesn't default this optional field. Consider using getattr(arguments, 'singleThread', False) or verify the schema class initializes it. Most DAP clients omit this field entirely.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The schema class does initialize
`self.singleThread = singleThread` with a default of `None` in all paths, so direct access would be technically safe. That said, using `getattr(arguments, 'singleThread', False)` is the more defensive approach and guards against any non-standard arguments objects. Changed in 1553801.