Skip to content

Commit

Permalink
Merge branch 'main' into add-asyncio-event-loop
Browse files Browse the repository at this point in the history
  • Loading branch information
graingert authored Oct 11, 2023
2 parents 6290284 + c523ce0 commit ac27e15
Show file tree
Hide file tree
Showing 34 changed files with 3,359 additions and 3,031 deletions.
24 changes: 0 additions & 24 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,30 +23,6 @@ repos:
- id: trailing-whitespace
types_or: [c, inc, python, rst]

- repo: local
hooks:
- id: python-file-whitespace
name: "Check Python file whitespace"
entry: 'python Tools/patchcheck/reindent.py --nobackup --newline LF'
language: 'system'
types: [python]
exclude: '^(Lib/test/tokenizedata/|Tools/c-analyzer/cpython/_parser).*$'

- repo: local
hooks:
- id: c-file-whitespace
name: "Check C file whitespace"
entry: "python Tools/patchcheck/untabify.py"
language: "system"
types_or: ['c', 'c++']
# Don't check the style of vendored libraries
exclude: |
(?x)^(
Modules/_decimal/.*
| Modules/libmpdec/.*
| Modules/expat/.*
)$
- repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v0.6.8
hooks:
Expand Down
21 changes: 14 additions & 7 deletions Lib/pdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,8 +431,9 @@ def preloop(self):
# fields are changed to be displayed
if newvalue is not oldvalue and newvalue != oldvalue:
displaying[expr] = newvalue
self.message('display %s: %r [old: %r]' %
(expr, newvalue, oldvalue))
self.message('display %s: %s [old: %s]' %
(expr, self._safe_repr(newvalue, expr),
self._safe_repr(oldvalue, expr)))

def _get_tb_and_exceptions(self, tb_or_exc):
"""
Expand Down Expand Up @@ -1460,7 +1461,7 @@ def do_args(self, arg):
for i in range(n):
name = co.co_varnames[i]
if name in dict:
self.message('%s = %r' % (name, dict[name]))
self.message('%s = %s' % (name, self._safe_repr(dict[name], name)))
else:
self.message('%s = *** undefined ***' % (name,))
do_a = do_args
Expand All @@ -1474,7 +1475,7 @@ def do_retval(self, arg):
self._print_invalid_arg(arg)
return
if '__return__' in self.curframe_locals:
self.message(repr(self.curframe_locals['__return__']))
self.message(self._safe_repr(self.curframe_locals['__return__'], "retval"))
else:
self.error('Not yet returned!')
do_rv = do_retval
Expand Down Expand Up @@ -1509,6 +1510,12 @@ def _msg_val_func(self, arg, func):
except:
self._error_exc()

def _safe_repr(self, obj, expr):
try:
return repr(obj)
except Exception as e:
return _rstr(f"*** repr({expr}) failed: {self._format_exc(e)} ***")

def do_p(self, arg):
"""p expression
Expand Down Expand Up @@ -1688,8 +1695,8 @@ def do_display(self, arg):
if not arg:
if self.displaying:
self.message('Currently displaying:')
for item in self.displaying.get(self.curframe, {}).items():
self.message('%s: %r' % item)
for key, val in self.displaying.get(self.curframe, {}).items():
self.message('%s: %s' % (key, self._safe_repr(val, key)))
else:
self.message('No expression is being displayed')
else:
Expand All @@ -1698,7 +1705,7 @@ def do_display(self, arg):
else:
val = self._getval_except(arg)
self.displaying.setdefault(self.curframe, {})[arg] = val
self.message('display %s: %r' % (arg, val))
self.message('display %s: %s' % (arg, self._safe_repr(val, arg)))

complete_display = _complete_expression

Expand Down
47 changes: 47 additions & 0 deletions Lib/test/test_pdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -2350,6 +2350,53 @@ def test_pdb_ambiguous_statements():
(Pdb) continue
"""

# Regression doctest for GH-65052: pdb's `args`, `retval` and `display`
# commands must not crash when an object's repr() raises (here, A.__repr__
# fails with AttributeError until self.a is assigned). The expected
# "*** repr(...) failed ***" lines below are the exact runtime output of
# Pdb._safe_repr, so the docstring text must not be edited casually.
def test_pdb_issue_gh_65052():
"""See GH-65052
args, retval and display should not crash if the object is not displayable
>>> class A:
... def __new__(cls):
... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
... return object.__new__(cls)
... def __init__(self):
... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
... self.a = 1
... def __repr__(self):
... return self.a
>>> def test_function():
... A()
>>> with PdbTestInput([ # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
... 's',
... 'retval',
... 'continue',
... 'args',
... 'display self',
... 'display',
... 'continue',
... ]):
... test_function()
> <doctest test.test_pdb.test_pdb_issue_gh_65052[0]>(4)__new__()
-> return object.__new__(cls)
(Pdb) s
--Return--
> <doctest test.test_pdb.test_pdb_issue_gh_65052[0]>(4)__new__()-><A instance at ...>
-> return object.__new__(cls)
(Pdb) retval
*** repr(retval) failed: AttributeError: 'A' object has no attribute 'a' ***
(Pdb) continue
> <doctest test.test_pdb.test_pdb_issue_gh_65052[0]>(7)__init__()
-> self.a = 1
(Pdb) args
self = *** repr(self) failed: AttributeError: 'A' object has no attribute 'a' ***
(Pdb) display self
display self: *** repr(self) failed: AttributeError: 'A' object has no attribute 'a' ***
(Pdb) display
Currently displaying:
self: *** repr(self) failed: AttributeError: 'A' object has no attribute 'a' ***
(Pdb) continue
"""


@support.requires_subprocess()
class PdbTestCase(unittest.TestCase):
Expand Down
26 changes: 22 additions & 4 deletions Makefile.pre.in
Original file line number Diff line number Diff line change
Expand Up @@ -347,20 +347,36 @@ PEGEN_OBJS= \
Parser/string_parser.o \
Parser/peg_api.o

TOKENIZER_OBJS= \
Parser/lexer/buffer.o \
Parser/lexer/lexer.o \
Parser/lexer/state.o \
Parser/tokenizer/file_tokenizer.o \
Parser/tokenizer/readline_tokenizer.o \
Parser/tokenizer/string_tokenizer.o \
Parser/tokenizer/utf8_tokenizer.o \
Parser/tokenizer/helpers.o

PEGEN_HEADERS= \
$(srcdir)/Include/internal/pycore_parser.h \
$(srcdir)/Parser/pegen.h \
$(srcdir)/Parser/string_parser.h

TOKENIZER_HEADERS= \
Parser/lexer/buffer.h \
Parser/lexer/lexer.h \
Parser/lexer/state.h \
Parser/tokenizer/tokenizer.h \
Parser/tokenizer/helpers.h

POBJS= \
Parser/token.o \

PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) Parser/myreadline.o Parser/tokenizer.o
PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) $(TOKENIZER_OBJS) Parser/myreadline.o

PARSER_HEADERS= \
$(PEGEN_HEADERS) \
$(srcdir)/Parser/tokenizer.h
$(TOKENIZER_HEADERS)

##########################################################################
# Python
Expand Down Expand Up @@ -1060,7 +1076,7 @@ regen-re: $(BUILDPYTHON)
$(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/build/generate_re_casefix.py $(srcdir)/Lib/re/_casefix.py

Programs/_testembed: Programs/_testembed.o $(LINK_PYTHON_DEPS)
$(LINKCC) $(PY_LDFLAGS_NOLTO) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(LINK_PYTHON_OBJS) $(LIBS) $(MODLIBS) $(SYSLIBS)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(LINK_PYTHON_OBJS) $(LIBS) $(MODLIBS) $(SYSLIBS)

############################################################################
# "Bootstrap Python" used to run deepfreeze.py
Expand Down Expand Up @@ -1161,7 +1177,7 @@ Programs/_freeze_module.o: Programs/_freeze_module.c Makefile
Modules/getpath_noop.o: $(srcdir)/Modules/getpath_noop.c Makefile

Programs/_freeze_module: Programs/_freeze_module.o Modules/getpath_noop.o $(LIBRARY_OBJS_OMIT_FROZEN)
$(LINKCC) $(PY_LDFLAGS_NOLTO) -o $@ Programs/_freeze_module.o Modules/getpath_noop.o $(LIBRARY_OBJS_OMIT_FROZEN) $(LIBS) $(MODLIBS) $(SYSLIBS)
$(LINKCC) $(PY_CORE_LDFLAGS) -o $@ Programs/_freeze_module.o Modules/getpath_noop.o $(LIBRARY_OBJS_OMIT_FROZEN) $(LIBS) $(MODLIBS) $(SYSLIBS)

# We manually freeze getpath.py rather than through freeze_modules
Python/frozen_modules/getpath.h: Modules/getpath.py $(FREEZE_MODULE_BOOTSTRAP_DEPS)
Expand Down Expand Up @@ -1397,6 +1413,8 @@ regen-pegen-metaparser:
.PHONY: regen-pegen
regen-pegen:
@$(MKDIR_P) $(srcdir)/Parser
@$(MKDIR_P) $(srcdir)/Parser/tokenizer
@$(MKDIR_P) $(srcdir)/Parser/lexer
PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q c \
$(srcdir)/Grammar/python.gram \
$(srcdir)/Grammar/Tokens \
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
Split the tokenizer into two separate directories:
- One part includes the actual lexeme producing logic and lives in ``Parser/lexer``.
- The second part wraps the lexer according to the different tokenization modes
we have (string, utf-8, file, interactive, readline) and lives in ``Parser/tokenizer``.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Prevent :mod:`pdb` from crashing when trying to display undisplayable objects.
9 changes: 8 additions & 1 deletion PCbuild/_freeze_module.vcxproj
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,14 @@
<ClCompile Include="..\Parser\action_helpers.c" />
<ClCompile Include="..\Parser\string_parser.c" />
<ClCompile Include="..\Parser\token.c" />
<ClCompile Include="..\Parser\tokenizer.c" />
<ClCompile Include="..\Parser\lexer\buffer.c" />
<ClCompile Include="..\Parser\lexer\state.c" />
<ClCompile Include="..\Parser\lexer\lexer.c" />
<ClCompile Include="..\Parser\tokenizer\string_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\file_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\utf8_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\readline_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\helpers.c" />
<ClCompile Include="..\PC\invalid_parameter_handler.c" />
<ClCompile Include="..\PC\msvcrtmodule.c" />
<ClCompile Include="..\PC\winreg.c" />
Expand Down
23 changes: 22 additions & 1 deletion PCbuild/_freeze_module.vcxproj.filters
Original file line number Diff line number Diff line change
Expand Up @@ -397,7 +397,28 @@
<ClCompile Include="..\Parser\token.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer.c">
<ClCompile Include="..\Parser\lexer\lexer.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\lexer\buffer.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\lexer\state.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\string_tokenizer.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\utf8_tokenizer.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\file_tokenizer.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\readline_tokenizer.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\helpers.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\Python\traceback.c">
Expand Down
15 changes: 13 additions & 2 deletions PCbuild/pythoncore.vcxproj
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,11 @@
<ClInclude Include="..\Objects\stringlib\replace.h" />
<ClInclude Include="..\Objects\stringlib\split.h" />
<ClInclude Include="..\Objects\unicodetype_db.h" />
<ClInclude Include="..\Parser\tokenizer.h" />
<ClInclude Include="..\Parser\lexer\state.h" />
<ClInclude Include="..\Parser\lexer\lexer.h" />
<ClInclude Include="..\Parser\lexer\buffer.h" />
<ClInclude Include="..\Parser\tokenizer\helpers.h" />
<ClInclude Include="..\Parser\tokenizer\tokenizer.h" />
<ClInclude Include="..\Parser\string_parser.h" />
<ClInclude Include="..\Parser\pegen.h" />
<ClInclude Include="..\PC\errmap.h" />
Expand Down Expand Up @@ -507,7 +511,14 @@
<ClCompile Include="..\Objects\unionobject.c" />
<ClCompile Include="..\Objects\weakrefobject.c" />
<ClCompile Include="..\Parser\myreadline.c" />
<ClCompile Include="..\Parser\tokenizer.c" />
<ClCompile Include="..\Parser\lexer\state.c" />
<ClCompile Include="..\Parser\lexer\lexer.c" />
<ClCompile Include="..\Parser\lexer\buffer.c" />
<ClCompile Include="..\Parser\tokenizer\string_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\file_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\utf8_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\readline_tokenizer.c" />
<ClCompile Include="..\Parser\tokenizer\helpers.c" />
<ClCompile Include="..\Parser\token.c" />
<ClCompile Include="..\Parser\pegen.c" />
<ClCompile Include="..\Parser\pegen_errors.c" />
Expand Down
37 changes: 35 additions & 2 deletions PCbuild/pythoncore.vcxproj.filters
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,19 @@
<ClInclude Include="..\Objects\unicodetype_db.h">
<Filter>Objects</Filter>
</ClInclude>
<ClInclude Include="..\Parser\tokenizer.h">
<ClInclude Include="..\Parser\lexer\lexer.h">
<Filter>Parser</Filter>
</ClInclude>
<ClInclude Include="..\Parser\lexer\state.h">
<Filter>Parser</Filter>
</ClInclude>
<ClInclude Include="..\Parser\lexer\buffer.h">
<Filter>Parser</Filter>
</ClInclude>
<ClInclude Include="..\Parser\tokenizer\tokenizer.h">
<Filter>Parser</Filter>
</ClInclude>
<ClInclude Include="..\Parser\tokenizer\helpers.h">
<Filter>Parser</Filter>
</ClInclude>
<ClInclude Include="..\PC\errmap.h">
Expand Down Expand Up @@ -1139,7 +1151,28 @@
<ClCompile Include="..\Parser\myreadline.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer.c">
<ClCompile Include="..\Parser\lexer\lexer.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\lexer\state.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\lexer\buffer.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\string_tokenizer.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\file_tokenizer.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\utf8_tokenizer.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\readline_tokenizer.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\tokenizer\helpers.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\token.c">
Expand Down
1 change: 0 additions & 1 deletion Parser/action_helpers.c
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
#include <Python.h>

#include "pegen.h"
#include "tokenizer.h"
#include "string_parser.h"
#include "pycore_runtime.h" // _PyRuntime

Expand Down
Loading

0 comments on commit ac27e15

Please sign in to comment.