From 9006af2ccbc46d18b11253176b21c666f3643cc7 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 24 May 2024 17:02:51 -0400 Subject: [PATCH 1/8] Speedup `import inspect` --- Lib/inspect.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index 0eed68d17c702b..04c5a3ac4b6902 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -145,7 +145,6 @@ import abc from annotationlib import Format, ForwardRef from annotationlib import get_annotations # re-exported -import ast import dis import collections.abc import enum @@ -153,9 +152,7 @@ import itertools import linecache import os -import re import sys -import tokenize import token import types import functools @@ -163,7 +160,7 @@ from keyword import iskeyword from operator import attrgetter from collections import namedtuple, OrderedDict -from weakref import ref as make_weakref +from _weakref import ref as make_weakref # Create constants for the compiler flags in Include/code.h # We try to get them from dis to avoid duplication @@ -1089,6 +1086,8 @@ def __init__(self): self.body_col0 = None def tokeneater(self, type, token, srowcol, erowcol, line): + import tokenize + if not self.started and not self.indecorator: if type in (tokenize.INDENT, tokenize.COMMENT, tokenize.NL): pass @@ -1139,6 +1138,8 @@ def tokeneater(self, type, token, srowcol, erowcol, line): def getblock(lines): """Extract the block of code at the top of the given list of lines.""" blockfinder = BlockFinder() + import tokenize + try: tokens = tokenize.generate_tokens(iter(lines).__next__) for _token in tokens: @@ -1367,6 +1368,7 @@ def formatannotation(annotation, base_module=None, *, quote_annotation_strings=T def repl(match): text = match.group() return text.removeprefix('typing.') + import re return re.sub(r'[\w\.]+', repl, repr(annotation)) if isinstance(annotation, types.GenericAlias): return str(annotation) @@ -2116,6 +2118,8 @@ def _signature_strip_non_python_syntax(signature): 
lines = [l.encode('ascii') for l in signature.split('\n') if l] generator = iter(lines).__next__ + + import tokenize token_stream = tokenize.tokenize(generator) text = [] @@ -2151,10 +2155,10 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True): """Private helper to parse content of '__text_signature__' and return a Signature based on it. """ - Parameter = cls._parameter_cls + import ast + Parameter = cls._parameter_cls clean_signature, self_parameter = _signature_strip_non_python_syntax(s) - program = "def foo" + clean_signature + ": pass" try: From 408e1ad2b12efc3d99b334fb3722a6debef695be Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Thu, 12 Feb 2026 17:03:00 +0000 Subject: [PATCH 2/8] Don't defer ast import While we can easily make ast import lazy in inspect module, it is anyway imported in annotationlib, which cannot be made lazy. --- Lib/inspect.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index 04c5a3ac4b6902..c286b3b2ac0beb 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -145,6 +145,7 @@ import abc from annotationlib import Format, ForwardRef from annotationlib import get_annotations # re-exported +import ast import dis import collections.abc import enum @@ -2155,8 +2156,6 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True): """Private helper to parse content of '__text_signature__' and return a Signature based on it. """ - import ast - Parameter = cls._parameter_cls clean_signature, self_parameter = _signature_strip_non_python_syntax(s) program = "def foo" + clean_signature + ": pass" From fdacefbb524f0a683cb79bcf2e471862af5b5e45 Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Thu, 12 Feb 2026 17:06:08 +0000 Subject: [PATCH 3/8] Use PEP810! 
--- Lib/inspect.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index c286b3b2ac0beb..5728ec6d96e639 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -163,6 +163,9 @@ from collections import namedtuple, OrderedDict from _weakref import ref as make_weakref +lazy import re +lazy import tokenize + # Create constants for the compiler flags in Include/code.h # We try to get them from dis to avoid duplication mod_dict = globals() @@ -1087,8 +1090,6 @@ def __init__(self): self.body_col0 = None def tokeneater(self, type, token, srowcol, erowcol, line): - import tokenize - if not self.started and not self.indecorator: if type in (tokenize.INDENT, tokenize.COMMENT, tokenize.NL): pass @@ -1139,8 +1140,6 @@ def tokeneater(self, type, token, srowcol, erowcol, line): def getblock(lines): """Extract the block of code at the top of the given list of lines.""" blockfinder = BlockFinder() - import tokenize - try: tokens = tokenize.generate_tokens(iter(lines).__next__) for _token in tokens: @@ -1369,7 +1368,6 @@ def formatannotation(annotation, base_module=None, *, quote_annotation_strings=T def repl(match): text = match.group() return text.removeprefix('typing.') - import re return re.sub(r'[\w\.]+', repl, repr(annotation)) if isinstance(annotation, types.GenericAlias): return str(annotation) @@ -2120,7 +2118,6 @@ def _signature_strip_non_python_syntax(signature): lines = [l.encode('ascii') for l in signature.split('\n') if l] generator = iter(lines).__next__ - import tokenize token_stream = tokenize.tokenize(generator) text = [] From b41d4aa411b2a92ae2b0b2c3604daf8082a6ac5d Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Thu, 12 Feb 2026 17:33:59 +0000 Subject: [PATCH 4/8] Revert whitespace changes --- Lib/inspect.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index 5728ec6d96e639..1f2f80779d10f2 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -2117,7 
+2117,6 @@ def _signature_strip_non_python_syntax(signature): lines = [l.encode('ascii') for l in signature.split('\n') if l] generator = iter(lines).__next__ - token_stream = tokenize.tokenize(generator) text = [] @@ -2154,7 +2153,9 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True): and return a Signature based on it. """ Parameter = cls._parameter_cls + clean_signature, self_parameter = _signature_strip_non_python_syntax(s) + program = "def foo" + clean_signature + ": pass" try: From 2f7df57daf10e18212cd6509922ff3868b28d209 Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Thu, 12 Feb 2026 17:56:35 +0000 Subject: [PATCH 5/8] Add blurb --- .../next/Library/2026-02-12-17-56-17.gh-issue-117865.jE1ema.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 Misc/NEWS.d/next/Library/2026-02-12-17-56-17.gh-issue-117865.jE1ema.rst diff --git a/Misc/NEWS.d/next/Library/2026-02-12-17-56-17.gh-issue-117865.jE1ema.rst b/Misc/NEWS.d/next/Library/2026-02-12-17-56-17.gh-issue-117865.jE1ema.rst new file mode 100644 index 00000000000000..f45f6682869eb1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2026-02-12-17-56-17.gh-issue-117865.jE1ema.rst @@ -0,0 +1 @@ +Reduce the import time of :mod:`inspect` module by ~20%. 
From 8d832d0a663c181522403e428b15167d7293c64b Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Thu, 12 Feb 2026 17:57:04 +0000 Subject: [PATCH 6/8] Add ensure_lazy_imports test --- Lib/test/test_inspect/test_inspect.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index e4a3a7d9add2c2..0f21230e351a56 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -173,6 +173,15 @@ def __get__(self, instance, owner): return self.func.__get__(instance, owner) +class TestImportTime(unittest.TestCase): + + @cpython_only + def test_lazy_import(self): + import_helper.ensure_lazy_imports( + "inspect", {"re", "tokenize"} + ) + + class TestPredicates(IsTestBase): def test_excluding_predicates(self): From ce1d31e6af265cf23611ffca249fe62c4a5f0cd2 Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Thu, 12 Feb 2026 18:17:20 +0000 Subject: [PATCH 7/8] Fix comment --- Lib/inspect.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index 1f2f80779d10f2..a1fbaa77ce5aea 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -166,7 +166,7 @@ lazy import re lazy import tokenize -# Create constants for the compiler flags in Include/code.h +# Create constants for the compiler flags in Include/cpython/code.h # We try to get them from dis to avoid duplication mod_dict = globals() for k, v in dis.COMPILER_FLAG_NAMES.items(): From 1ce987e9ba7eb87dd4600d825d439bca4c953d4e Mon Sep 17 00:00:00 2001 From: Daniel Hollas Date: Fri, 13 Feb 2026 12:33:44 +0000 Subject: [PATCH 8/8] move lazy imports --- Lib/inspect.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py index a1fbaa77ce5aea..5cfa99423a0280 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -153,7 +153,9 @@ import itertools import linecache import os +lazy import re import sys +lazy import tokenize import token 
import types import functools @@ -163,9 +165,6 @@ from collections import namedtuple, OrderedDict from _weakref import ref as make_weakref -lazy import re -lazy import tokenize - # Create constants for the compiler flags in Include/cpython/code.h # We try to get them from dis to avoid duplication mod_dict = globals()