Diffstat (limited to 'gdb/python/lib')
 gdb/python/lib/gdb/__init__.py        | 153
 gdb/python/lib/gdb/dap/breakpoint.py  |   6
 gdb/python/lib/gdb/dap/completions.py |   7
 gdb/python/lib/gdb/dap/evaluate.py    |   6
 gdb/python/lib/gdb/dap/events.py      |   6
 gdb/python/lib/gdb/dap/next.py        |  15
 gdb/python/lib/gdb/dap/server.py      |  90
 gdb/python/lib/gdb/dap/sources.py     |   6
 gdb/python/lib/gdb/dap/threads.py     |  23
 9 files changed, 226 insertions(+), 86 deletions(-)
diff --git a/gdb/python/lib/gdb/__init__.py b/gdb/python/lib/gdb/__init__.py
index 4ea5d06..cedd897 100644
--- a/gdb/python/lib/gdb/__init__.py
+++ b/gdb/python/lib/gdb/__init__.py
@@ -19,17 +19,22 @@ import sys
import threading
import traceback
from contextlib import contextmanager
+from importlib import reload
-# Python 3 moved "reload"
-if sys.version_info >= (3, 4):
- from importlib import reload
-else:
- from imp import reload
-
-import _gdb
-
+# The star import imports _gdb names. When the names are used locally, they
+# trigger F405 warnings unless added to the explicit import list.
# Note that two indicators are needed here to silence flake8.
from _gdb import * # noqa: F401,F403
+from _gdb import (
+ STDERR,
+ STDOUT,
+ Command,
+ execute,
+ flush,
+ parameter,
+ selected_inferior,
+ write,
+)
# isort: split
@@ -60,14 +65,14 @@ class _GdbFile(object):
self.write(line)
def flush(self):
- _gdb.flush(stream=self.stream)
+ flush(stream=self.stream)
def write(self, s):
- _gdb.write(s, stream=self.stream)
+ write(s, stream=self.stream)
-sys.stdout = _GdbFile(_gdb.STDOUT)
-sys.stderr = _GdbFile(_gdb.STDERR)
+sys.stdout = _GdbFile(STDOUT)
+sys.stderr = _GdbFile(STDERR)
# Default prompt hook does nothing.
prompt_hook = None
@@ -189,7 +194,7 @@ def GdbSetPythonDirectory(dir):
def current_progspace():
"Return the current Progspace."
- return _gdb.selected_inferior().progspace
+ return selected_inferior().progspace
def objfiles():
@@ -226,14 +231,14 @@ def set_parameter(name, value):
value = "on"
else:
value = "off"
- _gdb.execute("set " + name + " " + str(value), to_string=True)
+ execute("set " + name + " " + str(value), to_string=True)
@contextmanager
def with_parameter(name, value):
"""Temporarily set the GDB parameter NAME to VALUE.
Note that this is a context manager."""
- old_value = _gdb.parameter(name)
+ old_value = parameter(name)
set_parameter(name, value)
try:
# Nothing that useful to return.
@@ -392,3 +397,121 @@ def _handle_missing_objfile(pspace, buildid, filename):
return _handle_missing_files(
pspace, "objfile", lambda h: h(pspace, buildid, filename)
)
+
+
+class ParameterPrefix:
+ # A wrapper around gdb.Command for creating set/show prefixes.
+ #
+ # When creating a gdb.Parameter sub-class, it is sometimes necessary
+ # to first create a gdb.Command object in order to create the needed
+ # command prefix. However, for parameters, we actually need two
+ # prefixes, a 'set' prefix, and a 'show' prefix. With this helper
+ # class, a single instance of this class will create both prefixes at
+ # once.
+ #
+ # It is important that this class-level documentation not be a __doc__
+ # string. Users are expected to sub-class this ParameterPrefix class
+ # and add their own documentation. If they don't, then GDB will
+ # generate a suitable doc string. But, if this (parent) class has a
+ # __doc__ string of its own, then sub-classes will inherit that __doc__
+ # string, and GDB will not understand that it needs to generate one.
+
+ class _PrefixCommand(Command):
+ """A gdb.Command used to implement both the set and show prefixes.
+
+ This documentation string is not used as the prefix command
+ documentation as it is overridden in the __init__ method below."""
+
+ # This private method is connected to the 'invoke' attribute within
+ # this _PrefixCommand object if the containing ParameterPrefix
+ # object has an invoke_set or invoke_show method.
+ #
+ # This method records within self.__delegate which _PrefixCommand
+ # object is currently active, and then calls the correct invoke
+ # method on the delegate object (the ParameterPrefix sub-class
+ # object).
+ #
+ # Recording the currently active _PrefixCommand object is important;
+ # if from the invoke method the user calls dont_repeat, then this is
+ # forwarded to the currently active _PrefixCommand object.
+ def __invoke(self, args, from_tty):
+
+ # A helper class for use as part of a Python 'with' block.
+ # Records which gdb.Command object is currently running its
+ # invoke method.
+ class MarkActiveCallback:
+ # The CMD is a _PrefixCommand object, and the DELEGATE is
+ # the ParameterPrefix class, or sub-class object. At this
+ # point we simply record both of these within the
+ # MarkActiveCallback object.
+ def __init__(self, cmd, delegate):
+ self.__cmd = cmd
+ self.__delegate = delegate
+
+ # Record the currently active _PrefixCommand object within
+ # the outer ParameterPrefix sub-class object.
+ def __enter__(self):
+ self.__delegate.active_prefix = self.__cmd
+
+ # Once the invoke method has completed, then clear the
+ # _PrefixCommand object that was stored into the outer
+ # ParameterPrefix sub-class object.
+ def __exit__(self, exception_type, exception_value, traceback):
+ self.__delegate.active_prefix = None
+
+ # The self.__cb attribute is set when the _PrefixCommand object
+ # is created, and is either invoke_set or invoke_show within the
+ # ParameterPrefix sub-class object.
+ assert callable(self.__cb)
+
+ # Record the currently active _PrefixCommand object within the
+ # ParameterPrefix sub-class object, then call the relevant
+ # invoke method within the ParameterPrefix sub-class object.
+ with MarkActiveCallback(self, self.__delegate):
+ self.__cb(args, from_tty)
+
+ @staticmethod
+ def __find_callback(delegate, mode):
+ """The MODE is either 'set' or 'show'. Look for an invoke_MODE method
+ on DELEGATE, if a suitable method is found, then return it, otherwise,
+ return None.
+ """
+ cb = getattr(delegate, "invoke_" + mode, None)
+ if callable(cb):
+ return cb
+ return None
+
+ def __init__(self, mode, name, cmd_class, delegate, doc=None):
+ """Setup this gdb.Command. Mode is a string, either 'set' or 'show'.
+ NAME is the name for this prefix command, that is, the
+ words that appear after both 'set' and 'show' in the
+ command name. CMD_CLASS is the usual enum. And DELEGATE
+ is the gdb.ParameterPrefix object this prefix is part of.
+ """
+ assert mode == "set" or mode == "show"
+ if doc is None:
+ self.__doc__ = delegate.__doc__
+ else:
+ self.__doc__ = doc
+ self.__cb = self.__find_callback(delegate, mode)
+ self.__delegate = delegate
+ if self.__cb is not None:
+ self.invoke = self.__invoke
+ super().__init__(mode + " " + name, cmd_class, prefix=True)
+
+ def __init__(self, name, cmd_class, doc=None):
+ """Create a _PrefixCommand for both the set and show prefix commands.
+ NAME is the command name without either the leading 'set ' or
+ 'show ' strings, and CMD_CLASS is the usual enum value.
+ """
+ self.active_prefix = None
+ self._set_prefix_cmd = self._PrefixCommand("set", name, cmd_class, self, doc)
+ self._show_prefix_cmd = self._PrefixCommand("show", name, cmd_class, self, doc)
+
+ # When called from within an invoke method the self.active_prefix
+ # attribute should be set to a gdb.Command sub-class (a _PrefixCommand
+ # object, see above). Forward the dont_repeat call to this object to
+ # register the actual command as non-repeating.
+ def dont_repeat(self):
+ if self.active_prefix is not None:
+ self.active_prefix.dont_repeat()
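
The new ParameterPrefix helper above creates the matching 'set' and 'show' prefix commands in one go, and forwards dont_repeat from an invoke_set/invoke_show method to whichever prefix command is currently running. A minimal usage sketch follows, assuming the class is reachable as gdb.ParameterPrefix; the 'my-feature' prefix, the MyFeatureCheck parameter, and the invoke_set body are illustrative, not part of this patch:

    # Hypothetical example; "my-feature" and MyFeatureCheck are made up.
    import gdb

    class MyFeaturePrefix(gdb.ParameterPrefix):
        """Commands for configuring my-feature."""

        # Optional: runs when 'set my-feature ARGS' has no matching
        # sub-command.  dont_repeat() is forwarded by ParameterPrefix
        # to the currently active prefix command.
        def invoke_set(self, args, from_tty):
            self.dont_repeat()
            print("unknown 'set my-feature' argument: " + args)

    # Registers both the 'set my-feature' and 'show my-feature' prefixes.
    MyFeaturePrefix("my-feature", gdb.COMMAND_NONE)

    class MyFeatureCheck(gdb.Parameter):
        """Whether my-feature performs extra checking."""

        set_doc = "Set whether my-feature performs extra checking."
        show_doc = "Show whether my-feature performs extra checking."

        def __init__(self):
            super().__init__("my-feature check", gdb.COMMAND_NONE,
                             gdb.PARAM_BOOLEAN)

    MyFeatureCheck()

Instantiating the prefix first means the boolean parameter can then register itself as 'set my-feature check' / 'show my-feature check' under those prefixes.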
diff --git a/gdb/python/lib/gdb/dap/breakpoint.py b/gdb/python/lib/gdb/dap/breakpoint.py
index 59da120..4d4ca18 100644
--- a/gdb/python/lib/gdb/dap/breakpoint.py
+++ b/gdb/python/lib/gdb/dap/breakpoint.py
@@ -326,7 +326,7 @@ def _rewrite_fn_breakpoint(
}
-@request("setFunctionBreakpoints")
+@request("setFunctionBreakpoints", expect_stopped=False)
@capability("supportsFunctionBreakpoints")
def set_fn_breakpoint(*, breakpoints: Sequence, **args):
specs = [_rewrite_fn_breakpoint(**bp) for bp in breakpoints]
@@ -359,7 +359,7 @@ def _rewrite_insn_breakpoint(
}
-@request("setInstructionBreakpoints")
+@request("setInstructionBreakpoints", expect_stopped=False)
@capability("supportsInstructionBreakpoints")
def set_insn_breakpoints(
*, breakpoints: Sequence, offset: Optional[int] = None, **args
@@ -410,7 +410,7 @@ def _rewrite_exception_breakpoint(
}
-@request("setExceptionBreakpoints")
+@request("setExceptionBreakpoints", expect_stopped=False)
@capability("supportsExceptionFilterOptions")
@capability(
"exceptionBreakpointFilters",
diff --git a/gdb/python/lib/gdb/dap/completions.py b/gdb/python/lib/gdb/dap/completions.py
index 85acc43..e5003ff 100644
--- a/gdb/python/lib/gdb/dap/completions.py
+++ b/gdb/python/lib/gdb/dap/completions.py
@@ -39,8 +39,11 @@ def completions(
line = 1
else:
line = import_line(line)
- text = text.splitlines()[line - 1]
- text = text[: column - 1]
+ if text:
+ text = text.splitlines()[line - 1]
+ text = text[: column - 1]
+ else:
+ text = ""
mi_result = exec_mi_and_log("-complete", text)
result = []
completion = None
diff --git a/gdb/python/lib/gdb/dap/evaluate.py b/gdb/python/lib/gdb/dap/evaluate.py
index 55538e6..fcbcc99 100644
--- a/gdb/python/lib/gdb/dap/evaluate.py
+++ b/gdb/python/lib/gdb/dap/evaluate.py
@@ -69,7 +69,7 @@ def _repl(command, frame_id):
}
-@request("evaluate")
+@request("evaluate", defer_events=False)
@capability("supportsEvaluateForHovers")
@capability("supportsValueFormattingOptions")
def eval_request(
@@ -110,7 +110,7 @@ def variables(
@capability("supportsSetExpression")
-@request("setExpression")
+@request("setExpression", defer_events=False)
def set_expression(
*, expression: str, value: str, frameId: Optional[int] = None, format=None, **args
):
@@ -126,7 +126,7 @@ def set_expression(
@capability("supportsSetVariable")
-@request("setVariable")
+@request("setVariable", defer_events=False)
def set_variable(
*, variablesReference: int, name: str, value: str, format=None, **args
):
diff --git a/gdb/python/lib/gdb/dap/events.py b/gdb/python/lib/gdb/dap/events.py
index 4dc9d65..e8f2655 100644
--- a/gdb/python/lib/gdb/dap/events.py
+++ b/gdb/python/lib/gdb/dap/events.py
@@ -17,12 +17,12 @@ import gdb
from .modules import is_module, make_module
from .scopes import set_finish_value
-from .server import send_event, send_event_maybe_later
+from .server import send_event
from .startup import exec_and_log, in_gdb_thread, log
# True when the inferior is thought to be running, False otherwise.
# This may be accessed from any thread, which can be racy. However,
-# this unimportant because this global is only used for the
+# this is unimportant because this global is only used for the
# 'notStopped' response, which itself is inherently racy.
inferior_running = False
@@ -240,7 +240,7 @@ def _on_stop(event):
else:
obj["reason"] = stop_reason_map[event.details["reason"]]
_expected_pause = False
- send_event_maybe_later("stopped", obj)
+ send_event("stopped", obj)
# This keeps a bit of state between the start of an inferior call and
diff --git a/gdb/python/lib/gdb/dap/next.py b/gdb/python/lib/gdb/dap/next.py
index ddf4e05..898fff1 100644
--- a/gdb/python/lib/gdb/dap/next.py
+++ b/gdb/python/lib/gdb/dap/next.py
@@ -16,7 +16,7 @@
import gdb
from .events import exec_and_expect_stop
-from .server import capability, request, send_gdb, send_gdb_with_response
+from .server import capability, request
from .startup import in_gdb_thread
from .state import set_thread
@@ -73,19 +73,14 @@ def step_in(
exec_and_expect_stop(cmd)
-@request("stepOut", defer_stop_events=True)
+@request("stepOut")
def step_out(*, threadId: int, singleThread: bool = False, **args):
_handle_thread_step(threadId, singleThread, True)
exec_and_expect_stop("finish &", propagate_exception=True)
-# This is a server-side request because it is funny: it wants to
-# 'continue' but also return a result, which precludes using
-# response=False. Using 'continue &' would mostly work ok, but this
-# yields races when a stop occurs before the response is sent back to
-# the client.
-@request("continue", on_dap_thread=True)
+@request("continue")
def continue_request(*, threadId: int, singleThread: bool = False, **args):
- locked = send_gdb_with_response(lambda: _handle_thread_step(threadId, singleThread))
- send_gdb(lambda: exec_and_expect_stop("continue"))
+ locked = _handle_thread_step(threadId, singleThread)
+ exec_and_expect_stop("continue &")
return {"allThreadsContinued": not locked}
diff --git a/gdb/python/lib/gdb/dap/server.py b/gdb/python/lib/gdb/dap/server.py
index c4fa781..7dab582 100644
--- a/gdb/python/lib/gdb/dap/server.py
+++ b/gdb/python/lib/gdb/dap/server.py
@@ -74,6 +74,13 @@ class DeferredRequest:
self._result = result
@in_dap_thread
+ def defer_events(self):
+ """Return True if events should be deferred during execution.
+
+ This may be overridden by subclasses."""
+ return True
+
+ @in_dap_thread
def invoke(self):
"""Implement the deferred request.
@@ -95,7 +102,10 @@ class DeferredRequest:
"""
with _server.canceller.current_request(self._req):
+ if self.defer_events():
+ _server.set_defer_events()
_server.invoke_request(self._req, self._result, self.invoke)
+ _server.emit_pending_events()
# A subclass of Exception that is used solely for reporting that a
@@ -230,7 +240,7 @@ class Server:
self._out_stream = out_stream
self._child_stream = child_stream
self._delayed_fns_lock = threading.Lock()
- self.defer_stop_events = False
+ self._defer_events = False
self._delayed_fns = []
# This queue accepts JSON objects that are then sent to the
# DAP client. Writing is done in a separate thread to avoid
@@ -316,7 +326,7 @@ class Server:
def _read_inferior_output(self):
while True:
line = self._child_stream.readline()
- self.send_event(
+ self.send_event_maybe_later(
"output",
{
"category": "stdout",
@@ -356,6 +366,17 @@ class Server:
self._read_queue.put(None)
@in_dap_thread
+ def emit_pending_events(self):
+ """Emit any pending events."""
+ fns = None
+ with self._delayed_fns_lock:
+ fns = self._delayed_fns
+ self._delayed_fns = []
+ self._defer_events = False
+ for fn in fns:
+ fn()
+
+ @in_dap_thread
def main_loop(self):
"""The main loop of the DAP server."""
# Before looping, start the thread that writes JSON to the
@@ -371,13 +392,7 @@ class Server:
req = cmd["seq"]
with self.canceller.current_request(req):
self._handle_command(cmd)
- fns = None
- with self._delayed_fns_lock:
- fns = self._delayed_fns
- self._delayed_fns = []
- self.defer_stop_events = False
- for fn in fns:
- fn()
+ self.emit_pending_events()
# Got the terminate request. This is handled by the
# JSON-writing thread, so that we can ensure that all
# responses are flushed to the client before exiting.
@@ -386,13 +401,13 @@ class Server:
send_gdb("quit")
@in_dap_thread
- def send_event_later(self, event, body=None):
- """Send a DAP event back to the client, but only after the
- current request has completed."""
+ def set_defer_events(self):
+ """Defer any events until the current request has completed."""
with self._delayed_fns_lock:
- self._delayed_fns.append(lambda: self.send_event(event, body))
+ self._defer_events = True
- @in_gdb_thread
+ # Note that this does not need to be run in any particular thread,
+ # because it uses locks for thread-safety.
def send_event_maybe_later(self, event, body=None):
"""Send a DAP event back to the client, but if a request is in-flight
within the dap thread and that request is configured to delay the event,
@@ -401,10 +416,10 @@ class Server:
with self.canceller.lock:
if self.canceller.in_flight_dap_thread:
with self._delayed_fns_lock:
- if self.defer_stop_events:
- self._delayed_fns.append(lambda: self.send_event(event, body))
+ if self._defer_events:
+ self._delayed_fns.append(lambda: self._send_event(event, body))
return
- self.send_event(event, body)
+ self._send_event(event, body)
@in_dap_thread
def call_function_later(self, fn):
@@ -415,7 +430,7 @@ class Server:
# Note that this does not need to be run in any particular thread,
# because it just creates an object and writes it to a thread-safe
# queue.
- def send_event(self, event, body=None):
+ def _send_event(self, event, body=None):
"""Send an event to the DAP client.
EVENT is the name of the event, a string.
BODY is the body of the event, an arbitrary object."""
@@ -439,14 +454,6 @@ def send_event(event, body=None):
"""Send an event to the DAP client.
EVENT is the name of the event, a string.
BODY is the body of the event, an arbitrary object."""
- _server.send_event(event, body)
-
-
-def send_event_maybe_later(event, body=None):
- """Send a DAP event back to the client, but if a request is in-flight
- within the dap thread and that request is configured to delay the event,
- wait until the response has been sent until the event is sent back to
- the client."""
_server.send_event_maybe_later(event, body)
@@ -476,7 +483,7 @@ def request(
response: bool = True,
on_dap_thread: bool = False,
expect_stopped: bool = True,
- defer_stop_events: bool = False
+ defer_events: bool = True
):
"""A decorator for DAP requests.
@@ -498,9 +505,9 @@ def request(
inferior is running. When EXPECT_STOPPED is False, the request
will proceed regardless of the inferior's state.
- If DEFER_STOP_EVENTS is True, then make sure any stop events sent
- during the request processing are not sent to the client until the
- response has been sent.
+ If DEFER_EVENTS is True, then make sure any events sent during the
+ request processing are not sent to the client until the response
+ has been sent.
"""
# Validate the parameters.
@@ -523,26 +530,33 @@ def request(
# Verify that the function is run on the correct thread.
if on_dap_thread:
- cmd = in_dap_thread(func)
+ check_cmd = in_dap_thread(func)
else:
func = in_gdb_thread(func)
if response:
- if defer_stop_events:
- if _server is not None:
- with _server.delayed_events_lock:
- _server.defer_stop_events = True
def sync_call(**args):
return send_gdb_with_response(lambda: func(**args))
- cmd = sync_call
+ check_cmd = sync_call
else:
def non_sync_call(**args):
return send_gdb(lambda: func(**args))
- cmd = non_sync_call
+ check_cmd = non_sync_call
+
+ if defer_events:
+
+ def deferring(**args):
+ _server.set_defer_events()
+ return check_cmd(**args)
+
+ cmd = deferring
+
+ else:
+ cmd = check_cmd
# If needed, check that the inferior is not running. This
# wrapping is done last, so the check is done first, before
@@ -582,7 +596,7 @@ def client_bool_capability(name, default=False):
@request("initialize", on_dap_thread=True)
def initialize(**args):
_server.config = args
- _server.send_event_later("initialized")
+ _server.send_event_maybe_later("initialized")
global _lines_start_at_1
_lines_start_at_1 = client_bool_capability("linesStartAt1", True)
global _columns_start_at_1
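
The server.py changes above generalise the old stop-event-only deferral: a request (or DeferredRequest) that defers events calls set_defer_events, send_event_maybe_later queues events while such a request is in flight on the DAP thread, and emit_pending_events flushes the queue after the response has been written. Below is a standalone sketch of that pattern, assuming an event is just a zero-argument callable; the EventDeferrer name is invented for illustration and the canceller/in-flight check from the real code is omitted:

    import threading

    class EventDeferrer:
        """Illustrative model of the defer/flush logic in Server."""

        def __init__(self):
            self._lock = threading.Lock()
            self._defer = False
            self._delayed = []

        def set_defer_events(self):
            # Called when a deferring request starts: hold events back.
            with self._lock:
                self._defer = True

        def send_event_maybe_later(self, emit):
            # EMIT actually sends one event; queue it while deferring.
            with self._lock:
                if self._defer:
                    self._delayed.append(emit)
                    return
            emit()

        def emit_pending_events(self):
            # Called after the response is sent: flush queued events in order.
            with self._lock:
                pending, self._delayed, self._defer = self._delayed, [], False
            for emit in pending:
                emit()

With defer_events now defaulting to True in the request decorator, this is the path most requests take; evaluate, setExpression and setVariable opt out with defer_events=False.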
diff --git a/gdb/python/lib/gdb/dap/sources.py b/gdb/python/lib/gdb/dap/sources.py
index 625c01f..efcd799 100644
--- a/gdb/python/lib/gdb/dap/sources.py
+++ b/gdb/python/lib/gdb/dap/sources.py
@@ -64,9 +64,9 @@ def decode_source(source):
"""Decode a Source object.
Finds and returns the filename of a given Source object."""
- if "path" in source:
- return source["path"]
- if "sourceReference" not in source:
+ if "sourceReference" not in source or source["sourceReference"] <= 0:
+ if "path" in source:
+ return source["path"]
raise DAPException("either 'path' or 'sourceReference' must appear in Source")
ref = source["sourceReference"]
if ref not in _id_map:
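
With the sources.py change above, a positive sourceReference now takes precedence over a path, and a missing or non-positive reference falls back to the path, matching the DAP specification. Illustrative calls (the path and reference number are made up):

    from gdb.dap.sources import decode_source

    decode_source({"path": "/tmp/main.c"})                        # -> "/tmp/main.c"
    decode_source({"path": "/tmp/main.c", "sourceReference": 0})  # -> "/tmp/main.c"
    decode_source({"sourceReference": 5})  # filename previously recorded for
                                           # reference 5; DAPException if unknown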
diff --git a/gdb/python/lib/gdb/dap/threads.py b/gdb/python/lib/gdb/dap/threads.py
index c271961..89046a8 100644
--- a/gdb/python/lib/gdb/dap/threads.py
+++ b/gdb/python/lib/gdb/dap/threads.py
@@ -16,27 +16,32 @@
import gdb
from .server import request
+from .startup import in_gdb_thread
+@in_gdb_thread
def _thread_name(thr):
if thr.name is not None:
return thr.name
if thr.details is not None:
return thr.details
- return None
+ # Always return a name, as the protocol doesn't allow for nameless
+ # threads. Use the local (per-inferior) thread number here... it
+ # doesn't matter without multi-inferior, and with multiple inferiors
+ # it might make more sense to the user.
+ return f"Thread #{thr.num}"
-@request("threads")
+@request("threads", expect_stopped=False)
def threads(**args):
result = []
for thr in gdb.selected_inferior().threads():
- one_result = {
- "id": thr.global_num,
- }
- name = _thread_name(thr)
- if name is not None:
- one_result["name"] = name
- result.append(one_result)
+ result.append(
+ {
+ "id": thr.global_num,
+ "name": _thread_name(thr),
+ }
+ )
return {
"threads": result,
}
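
With the threads.py change above, every entry in the 'threads' response now carries a name, falling back to the per-inferior thread number when GDB has neither a name nor details for the thread. A hypothetical response body (the ids and the "main" name are made up):

    body = {
        "threads": [
            {"id": 1, "name": "main"},       # thr.name was set
            {"id": 2, "name": "Thread #2"},  # fallback built by _thread_name
        ]
    }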