diff --git a/line_profiler/__main__.py b/line_profiler/__main__.py index c626c205..33831ea3 100644 --- a/line_profiler/__main__.py +++ b/line_profiler/__main__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .line_profiler import main if __name__ == '__main__': diff --git a/line_profiler/__main__.pyi b/line_profiler/__main__.pyi deleted file mode 100644 index 8b137891..00000000 --- a/line_profiler/__main__.pyi +++ /dev/null @@ -1 +0,0 @@ - diff --git a/line_profiler/autoprofile/ast_profile_transformer.py b/line_profiler/autoprofile/ast_profile_transformer.py index 1f4655d8..f9270e00 100644 --- a/line_profiler/autoprofile/ast_profile_transformer.py +++ b/line_profiler/autoprofile/ast_profile_transformer.py @@ -1,7 +1,12 @@ +from __future__ import annotations + import ast +from typing import Union -def ast_create_profile_node(modname, profiler_name='profile', attr='add_imported_function_or_module'): +def ast_create_profile_node( + modname: str, profiler_name: str = 'profile', + attr: str = 'add_imported_function_or_module') -> ast.Expr: """Create an abstract syntax tree node that adds an object to the profiler to be profiled. An abstract syntax tree node is created which calls the attr method from profile and @@ -45,7 +50,9 @@ class AstProfileTransformer(ast.NodeTransformer): immediately after the import. """ - def __init__(self, profile_imports=False, profiled_imports=None, profiler_name='profile'): + def __init__(self, profile_imports: bool = False, + profiled_imports: list[str] | None = None, + profiler_name: str = 'profile') -> None: """Initializes the AST transformer with the profiler name. Args: @@ -63,7 +70,9 @@ def __init__(self, profile_imports=False, profiled_imports=None, profiler_name=' self._profiled_imports = profiled_imports if profiled_imports is not None else [] self._profiler_name = profiler_name - def _visit_func_def(self, node): + def _visit_func_def( + self, node: ast.FunctionDef | ast.AsyncFunctionDef + ) -> ast.FunctionDef | ast.AsyncFunctionDef: """Decorate functions/methods with profiler. 
Checks if the function/method already has a profile_name decorator, if not, it will append @@ -91,7 +100,10 @@ def _visit_func_def(self, node): visit_FunctionDef = visit_AsyncFunctionDef = _visit_func_def - def _visit_import(self, node): + def _visit_import( + self, node: ast.Import | ast.ImportFrom + ) -> (ast.Import | ast.ImportFrom + | list[ast.Import | ast.ImportFrom | ast.Expr]): """Add a node that profiles an import If profile_imports is True and the import is not in profiled_imports, @@ -121,7 +133,9 @@ def _visit_import(self, node): visited.append(expr) return visited - def visit_Import(self, node): + def visit_Import( + self, node: ast.Import + ) -> ast.Import | list[ast.Import | ast.Expr]: """Add a node that profiles an object imported using the "import foo" sytanx Args: @@ -137,7 +151,9 @@ def visit_Import(self, node): """ return self._visit_import(node) - def visit_ImportFrom(self, node): + def visit_ImportFrom( + self, node: ast.ImportFrom + ) -> ast.ImportFrom | list[ast.ImportFrom | ast.Expr]: """Add a node that profiles an object imported using the "from foo import bar" syntax Args: diff --git a/line_profiler/autoprofile/ast_profile_transformer.pyi b/line_profiler/autoprofile/ast_profile_transformer.pyi deleted file mode 100644 index 9d64182c..00000000 --- a/line_profiler/autoprofile/ast_profile_transformer.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List -import _ast -import ast -from typing import Union - - -def ast_create_profile_node(modname, - profiler_name: str = ..., - attr: str = ...) -> (_ast.Expr): - ... - - -class AstProfileTransformer(ast.NodeTransformer): - - def __init__(self, - profile_imports: bool = False, - profiled_imports: List[str] | None = None, - profiler_name: str = 'profile') -> None: - ... - - def visit_FunctionDef(self, node: _ast.FunctionDef) -> (_ast.FunctionDef): - ... - - def visit_AsyncFunctionDef( - self, node: _ast.AsyncFunctionDef) -> (_ast.AsyncFunctionDef): - ... - - def visit_Import( - self, node: _ast.Import - ) -> (Union[_ast.Import, List[Union[_ast.Import, _ast.Expr]]]): - ... - - def visit_ImportFrom( - self, node: _ast.ImportFrom - ) -> (Union[_ast.ImportFrom, List[Union[_ast.ImportFrom, _ast.Expr]]]): - ... diff --git a/line_profiler/autoprofile/ast_tree_profiler.py b/line_profiler/autoprofile/ast_tree_profiler.py index 892ebfbc..994074f6 100644 --- a/line_profiler/autoprofile/ast_tree_profiler.py +++ b/line_profiler/autoprofile/ast_tree_profiler.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import ast import os +from typing import Type from .ast_profile_transformer import (AstProfileTransformer, ast_create_profile_node) @@ -20,11 +23,11 @@ class AstTreeProfiler: """ def __init__(self, - script_file, - prof_mod, - profile_imports, - ast_transformer_class_handler=AstProfileTransformer, - profmod_extractor_class_handler=ProfmodExtractor): + script_file: str, + prof_mod: list[str], + profile_imports: bool, + ast_transformer_class_handler: Type = AstProfileTransformer, + profmod_extractor_class_handler: Type = ProfmodExtractor) -> None: """Initializes the AST tree profiler instance with the script file path Args: @@ -52,7 +55,8 @@ def __init__(self, self._profmod_extractor_class_handler = profmod_extractor_class_handler @staticmethod - def _check_profile_full_script(script_file, prof_mod): + def _check_profile_full_script( + script_file: str, prof_mod: list[str]) -> bool: """Check whether whole script should be profiled. 
Checks whether path to script has been passed to prof_mod indicating that @@ -76,7 +80,7 @@ def _check_profile_full_script(script_file, prof_mod): return profile_full_script @staticmethod - def _get_script_ast_tree(script_file): + def _get_script_ast_tree(script_file: str) -> ast.Module: """Generate an abstract syntax from a script file. Args: @@ -93,10 +97,10 @@ def _get_script_ast_tree(script_file): return tree def _profile_ast_tree(self, - tree, - tree_imports_to_profile_dict, - profile_full_script=False, - profile_imports=False): + tree: ast.Module, + tree_imports_to_profile_dict: dict[int, str], + profile_full_script: bool = False, + profile_imports: bool = False) -> ast.Module: """Add profiling to an abstract syntax tree. Adds nodes to the AST that adds the specified objects to the profiler. @@ -139,7 +143,7 @@ def _profile_ast_tree(self, ast.fix_missing_locations(tree) return tree - def profile(self): + def profile(self) -> ast.Module: """Create an abstract syntax tree of a script and add profiling to it. Reads a script file and generates an abstract syntax tree. diff --git a/line_profiler/autoprofile/ast_tree_profiler.pyi b/line_profiler/autoprofile/ast_tree_profiler.pyi deleted file mode 100644 index fc533e86..00000000 --- a/line_profiler/autoprofile/ast_tree_profiler.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from typing import List -from typing import Type -import _ast - -from .ast_profile_transformer import AstProfileTransformer -from .profmod_extractor import ProfmodExtractor - -__docstubs__: str - - -class AstTreeProfiler: - - def __init__( - self, - script_file: str, - prof_mod: List[str], - profile_imports: bool, - ast_transformer_class_handler: Type = AstProfileTransformer, - profmod_extractor_class_handler: Type = ProfmodExtractor) -> None: - ... - - def profile(self) -> (_ast.Module): - ... diff --git a/line_profiler/autoprofile/autoprofile.py b/line_profiler/autoprofile/autoprofile.py index 5985a84b..1442e339 100644 --- a/line_profiler/autoprofile/autoprofile.py +++ b/line_profiler/autoprofile/autoprofile.py @@ -44,12 +44,15 @@ def main(): python -m kernprof -p demo.py -l demo.py python -m line_profiler -rmt demo.py.lprof """ +from __future__ import annotations import contextlib import functools import importlib.util import operator import sys import types +from collections.abc import MutableMapping +from typing import Any from .ast_tree_profiler import AstTreeProfiler from .run_module import AstTreeModuleProfiler from .line_profiler_utils import add_imported_function_or_module @@ -58,7 +61,7 @@ def main(): PROFILER_LOCALS_NAME = 'prof' -def _extend_line_profiler_for_profiling_imports(prof): +def _extend_line_profiler_for_profiling_imports(prof: Any) -> None: """Allow profiler to handle functions/methods, classes & modules with a single call. Add a method to LineProfiler that can identify whether the object is a @@ -73,7 +76,9 @@ def _extend_line_profiler_for_profiling_imports(prof): prof.add_imported_function_or_module = types.MethodType(add_imported_function_or_module, prof) -def run(script_file, ns, prof_mod, profile_imports=False, as_module=False): +def run(script_file: str, ns: MutableMapping[str, Any], + prof_mod: list[str], profile_imports: bool = False, + as_module: bool = False) -> None: """Automatically profile a script and run it. 
Profile functions, classes & modules specified in prof_mod without needing to add @@ -98,10 +103,10 @@ def run(script_file, ns, prof_mod, profile_imports=False, as_module=False): Whether we're running script_file as a module """ class restore_dict: - def __init__(self, d, target=None): + def __init__(self, d: MutableMapping[str, Any], target=None): self.d = d self.target = target - self.copy = None + self.copy: dict[str, Any] | None = None def __enter__(self): assert self.copy is None diff --git a/line_profiler/autoprofile/autoprofile.pyi b/line_profiler/autoprofile/autoprofile.pyi deleted file mode 100644 index 65ddbf2b..00000000 --- a/line_profiler/autoprofile/autoprofile.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from typing import List - -PROFILER_LOCALS_NAME: str - - -def run(script_file: str, - ns: dict, - prof_mod: List[str], - profile_imports: bool = False, - as_module: bool = False) -> None: - ... diff --git a/line_profiler/autoprofile/eager_preimports.py b/line_profiler/autoprofile/eager_preimports.py index 1e8444df..461f2606 100644 --- a/line_profiler/autoprofile/eager_preimports.py +++ b/line_profiler/autoprofile/eager_preimports.py @@ -2,16 +2,18 @@ Tools for eagerly pre-importing everything as specified in ``line_profiler.autoprof.run(prof_mod=...)``. """ +from __future__ import annotations + import ast import functools import itertools -from collections import namedtuple from collections.abc import Collection from keyword import iskeyword from importlib.util import find_spec from pkgutil import walk_packages from textwrap import dedent, indent as indent_ from warnings import warn +from typing import Any, Generator, NamedTuple, TextIO from .util_static import ( modname_to_modpath, modpath_to_modname, package_modpaths) @@ -20,7 +22,7 @@ 'resolve_profiling_targets', 'write_eager_import_module') -def is_dotted_path(obj): +def is_dotted_path(obj: Any) -> bool: """ Example: >>> assert not is_dotted_path(object()) @@ -37,7 +39,7 @@ def is_dotted_path(obj): return True -def get_expression(obj): +def get_expression(obj: Any) -> ast.Expression | None: """ Example: >>> assert not get_expression(object()) @@ -55,7 +57,8 @@ def get_expression(obj): return None -def split_dotted_path(dotted_path, static=True): +def split_dotted_path( + dotted_path: str, static: bool = True) -> tuple[str, str | None]: """ Arguments: dotted_path (str): @@ -133,7 +136,7 @@ def split_dotted_path(dotted_path, static=True): f'module: {checked_locs!r}') -def strip(s): +def strip(s: str) -> str: return dedent(s).strip('\n') @@ -163,18 +166,20 @@ class LoadedNameFinder(ast.NodeVisitor): >>> names = LoadedNameFinder.find(ast.parse(module)) >>> assert names == {'bar', 'foobar', 'a', 'str'}, names """ - def __init__(self): - self.names = set() - self.contexts = [] + def __init__(self) -> None: + self.names: set[str] = set() + self.contexts: list[set[str]] = [] - def visit_Name(self, node): + def visit_Name(self, node: ast.Name) -> None: if not isinstance(node.ctx, ast.Load): return name = node.id if not any(name in ctx for ctx in self.contexts): self.names.add(node.id) - def _visit_func_def(self, node): + def _visit_func_def( + self, node: ast.FunctionDef | ast.AsyncFunctionDef | ast.Lambda + ) -> None: args = node.args arg_names = { arg.arg @@ -191,13 +196,13 @@ def _visit_func_def(self, node): visit_FunctionDef = visit_AsyncFunctionDef = visit_Lambda = _visit_func_def @classmethod - def find(cls, node): + def find(cls, node: ast.AST) -> set[str]: finder = cls() finder.visit(node) return finder.names -def 
propose_names(prefixes): +def propose_names(prefixes: Collection[str]) -> Generator[str, None, None]: """ Generate names based on prefixes. @@ -235,7 +240,9 @@ def propose_names(prefixes): yield pattern(prefix, i) -def resolve_profiling_targets(dotted_paths, static=True, recurse=False): +def resolve_profiling_targets( + dotted_paths: Collection[str], static: bool = True, + recurse: Collection[str] | bool = False) -> ResolvedResult: """ Arguments: dotted_paths (Collection[str]): @@ -327,11 +334,12 @@ def walk_packages_import_sys(pkg): return ResolvedResult(all_targets, indirect_submods, unknown_locs) -def write_eager_import_module(dotted_paths, stream=None, *, - static=True, - recurse=False, - adder='profile.add_imported_function_or_module', - indent=' '): +def write_eager_import_module( + dotted_paths: Collection[str], stream: TextIO | None = None, *, + static: bool = True, + recurse: Collection[str] | bool = False, + adder: str = 'profile.add_imported_function_or_module', + indent: str = ' ') -> None: r""" Write a module which autoprofiles all its imports. @@ -564,5 +572,7 @@ def write_eager_import_module(dotted_paths, stream=None, *, """)) -ResolvedResult = namedtuple('ResolvedResult', - ('targets', 'indirect', 'unresolved')) +class ResolvedResult(NamedTuple): + targets: dict[str, set[str | None]] + indirect: set[str] + unresolved: list[str] diff --git a/line_profiler/autoprofile/eager_preimports.pyi b/line_profiler/autoprofile/eager_preimports.pyi deleted file mode 100644 index 756a6b7b..00000000 --- a/line_profiler/autoprofile/eager_preimports.pyi +++ /dev/null @@ -1,67 +0,0 @@ -import ast -from typing import ( - Any, Union, - Collection, Dict, Generator, List, NamedTuple, Set, Tuple, - TextIO) - - -def is_dotted_path(obj: Any) -> bool: - ... - - -def get_expression(obj: Any) -> Union[ast.Expression, None]: - ... - - -def split_dotted_path( - dotted_path: str, static: bool = True) -> Tuple[str, Union[str, None]]: - ... - - -def strip(s: str) -> str: - ... - - -class LoadedNameFinder(ast.NodeVisitor): - names: Set[str] - contexts: List[Set[str]] - - def visit_Name(self, node: ast.Name) -> None: - ... - - def visit_FunctionDef(self, - node: Union[ast.FunctionDef, ast.AsyncFunctionDef, - ast.Lambda]) -> None: - ... - - visit_AsyncFunctionDef = visit_Lambda = visit_FunctionDef - - @classmethod - def find(cls, node: ast.AST) -> Set[str]: - ... - - -def propose_names(prefixes: Collection[str]) -> Generator[str, None, None]: - ... - - -def resolve_profiling_targets( - dotted_paths: Collection[str], - static: bool = True, - recurse: Union[Collection[str], bool] = False) -> 'ResolvedResult': - ... - - -def write_eager_import_module( - dotted_paths: Collection[str], stream: Union[TextIO, None] = None, *, - static: bool = True, - recurse: Union[Collection[str], bool] = False, - adder: str = 'profile.add_imported_function_or_module', - indent: str = ' ') -> None: - ... 
- - -class ResolvedResult(NamedTuple): - targets: Dict[str, Set[Union[str, None]]] - indirect: Set[str] - unresolved: List[str] diff --git a/line_profiler/autoprofile/line_profiler_utils.py b/line_profiler/autoprofile/line_profiler_utils.py index c4e736d1..fff7d43e 100644 --- a/line_profiler/autoprofile/line_profiler_utils.py +++ b/line_profiler/autoprofile/line_profiler_utils.py @@ -1,8 +1,38 @@ +from __future__ import annotations + import inspect +from functools import cached_property, partial, partialmethod +from types import FunctionType, MethodType, ModuleType +from typing import TYPE_CHECKING, Any, Literal, overload + +if TYPE_CHECKING: # pragma: no cover + from ..profiler_mixin import CLevelCallable, CythonCallable + from ..scoping_policy import ScopingPolicy, ScopingPolicyDict + + +@overload +def add_imported_function_or_module( + self, item: CLevelCallable | Any, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0]: + ... + + +@overload +def add_imported_function_or_module( + self, + item: (FunctionType | CythonCallable | type | partial | property + | cached_property | MethodType | staticmethod | classmethod + | partialmethod | ModuleType), + *, scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0, 1]: + ... -def add_imported_function_or_module(self, item, *, - scoping_policy=None, wrap=False): +def add_imported_function_or_module( + self, item: object, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0, 1]: """ Method to add an object to :py:class:`~.line_profiler.LineProfiler` to be profiled. diff --git a/line_profiler/autoprofile/line_profiler_utils.pyi b/line_profiler/autoprofile/line_profiler_utils.pyi deleted file mode 100644 index 2d114b34..00000000 --- a/line_profiler/autoprofile/line_profiler_utils.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from functools import partial, partialmethod, cached_property -from types import FunctionType, MethodType, ModuleType -from typing import overload, Any, Literal, TypeVar, TYPE_CHECKING - -if TYPE_CHECKING: # Stub-only annotations - from ..profiler_mixin import CLevelCallable, CythonCallable - from ..scoping_policy import ScopingPolicy, ScopingPolicyDict - - - - -@overload -def add_imported_function_or_module( - self, item: CLevelCallable | Any, - scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, - wrap: bool = False) -> Literal[0]: - ... - - -@overload -def add_imported_function_or_module( - self, - item: (FunctionType | CythonCallable - | type | partial | property | cached_property - | MethodType | staticmethod | classmethod | partialmethod - | ModuleType), - scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, - wrap: bool = False) -> Literal[0, 1]: - ... diff --git a/line_profiler/autoprofile/profmod_extractor.py b/line_profiler/autoprofile/profmod_extractor.py index 929a1e9e..840cbd1c 100644 --- a/line_profiler/autoprofile/profmod_extractor.py +++ b/line_profiler/autoprofile/profmod_extractor.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ast import os import sys @@ -13,7 +15,8 @@ class ProfmodExtractor: abstract syntax tree. 
""" - def __init__(self, tree, script_file, prof_mod): + def __init__(self, tree: ast.Module, script_file: str, + prof_mod: list[str]) -> None: """Initializes the AST tree profiler instance with the AST, script file path and prof_mod Args: @@ -33,7 +36,7 @@ def __init__(self, tree, script_file, prof_mod): self._prof_mod = prof_mod @staticmethod - def _is_path(text): + def _is_path(text: str) -> bool: """Check whether a string is a path. Checks if a string contains a slash or ends with .py indicating it is a path. @@ -50,7 +53,8 @@ def _is_path(text): return ret @classmethod - def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): + def _get_modnames_to_profile_from_prof_mod( + cls, script_file: str, prof_mod: list[str]) -> list[str]: """Grab the valid paths and all dotted paths in prof_mod and their subpackages and submodules, in the form of dotted paths. @@ -127,7 +131,8 @@ def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): return modnames_to_profile @staticmethod - def _ast_get_imports_from_tree(tree): + def _ast_get_imports_from_tree( + tree: ast.Module) -> list[dict[str, str | int | None]]: """Get all imports in an abstract syntax tree. Args: @@ -174,7 +179,10 @@ def _ast_get_imports_from_tree(tree): return module_dict_list @staticmethod - def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): + def _find_modnames_in_tree_imports( + modnames_to_profile: list[str], + module_dict_list: list[dict[str, str | int | None]] + ) -> dict[int, str]: """Map modnames to imports from an abstract sytax tree. Find imports in modue_dict_list, created from an abstract syntax tree, that match @@ -214,7 +222,7 @@ def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): modnames_found_in_tree[module_dict['tree_index']] = name return modnames_found_in_tree - def run(self): + def run(self) -> dict[int, str]: """Map prof_mod to imports in an abstract syntax tree. Takes the paths and dotted paths in prod_mod and finds their respective imports in an diff --git a/line_profiler/autoprofile/profmod_extractor.pyi b/line_profiler/autoprofile/profmod_extractor.pyi deleted file mode 100644 index ebaf7526..00000000 --- a/line_profiler/autoprofile/profmod_extractor.pyi +++ /dev/null @@ -1,13 +0,0 @@ -import _ast -from typing import List -from typing import Dict - - -class ProfmodExtractor: - - def __init__(self, tree: _ast.Module, script_file: str, - prof_mod: List[str]) -> None: - ... - - def run(self) -> (Dict[int, str]): - ... diff --git a/line_profiler/autoprofile/run_module.py b/line_profiler/autoprofile/run_module.py index f4461409..3490c221 100644 --- a/line_profiler/autoprofile/run_module.py +++ b/line_profiler/autoprofile/run_module.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ast import os @@ -5,7 +7,7 @@ from .util_static import modname_to_modpath, modpath_to_modname -def get_module_from_importfrom(node, module): +def get_module_from_importfrom(node: ast.ImportFrom, module: str) -> str: r"""Resolve the full path of a relative import. 
Args: @@ -53,10 +55,10 @@ def get_module_from_importfrom(node, module): class ImportFromTransformer(ast.NodeTransformer): """Turn all the relative imports into absolute imports.""" - def __init__(self, module): + def __init__(self, module: str) -> None: self.module = module - def visit_ImportFrom(self, node): + def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom: level = node.level if not level: return self.generic_visit(node) @@ -74,7 +76,7 @@ class AstTreeModuleProfiler(AstTreeProfiler): classes & modules in prof_mod to the profiler to be profiled. """ @classmethod - def _get_script_ast_tree(cls, script_file): + def _get_script_ast_tree(cls, script_file: str) -> ast.Module: tree = super()._get_script_ast_tree(script_file) # Note: don't drop the `.__init__` or `.__main__` suffix, lest # the relative imports fail @@ -83,11 +85,12 @@ def _get_script_ast_tree(cls, script_file): return ImportFromTransformer(module).visit(tree) @staticmethod - def _is_main(fname): + def _is_main(fname: str) -> bool: return os.path.basename(fname) == '__main__.py' @classmethod - def _check_profile_full_script(cls, script_file, prof_mod): + def _check_profile_full_script( + cls, script_file: str, prof_mod: list[str]) -> bool: rp = os.path.realpath paths_to_check = {rp(script_file)} if cls._is_main(script_file): diff --git a/line_profiler/autoprofile/run_module.pyi b/line_profiler/autoprofile/run_module.pyi deleted file mode 100644 index 6d63a6d5..00000000 --- a/line_profiler/autoprofile/run_module.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import ast - -from .ast_tree_profiler import AstTreeProfiler - - -def get_module_from_importfrom(node: ast.ImportFrom, module: str) -> str: - ... - - -class ImportFromTransformer(ast.NodeTransformer): - def __init__(self, module: str) -> None: - ... - - def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom: - ... - - module: str - - -class AstTreeModuleProfiler(AstTreeProfiler): - ... diff --git a/line_profiler/autoprofile/util_static.py b/line_profiler/autoprofile/util_static.py index 117eef4e..8113dc78 100644 --- a/line_profiler/autoprofile/util_static.py +++ b/line_profiler/autoprofile/util_static.py @@ -3,6 +3,7 @@ :py:mod:`xdoctest` via dev/maintain/port_utilities.py in the line_profiler repo. """ +from __future__ import annotations from os.path import abspath from os.path import dirname @@ -18,19 +19,22 @@ from os.path import isfile from os.path import realpath import sys +from os import PathLike +from collections.abc import Generator +from typing import Any # from xdoctest import utils def package_modpaths( - pkgpath, - with_pkg=False, - with_mod=True, - followlinks=True, - recursive=True, - with_libs=False, - check=True, -): + pkgpath: str, + with_pkg: bool = False, + with_mod: bool = True, + followlinks: bool = True, + recursive: bool = True, + with_libs: bool = False, + check: bool = True, +) -> Generator[Any, None, None]: r""" Finds sub-packages and sub-modules belonging to a package. @@ -419,7 +423,11 @@ def check_dpath(dpath): return found_modpath -def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): +def modname_to_modpath( + modname: str, + hide_init: bool = True, + hide_main: bool = False, + sys_path: list[str | PathLike] | None = None) -> str | None: """ Finds the path to a python module from its name. 
@@ -470,7 +478,7 @@ def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): return modpath -def split_modpath(modpath, check=True): +def split_modpath(modpath: str, check: bool = True) -> tuple[str, str]: """ Splits the modpath into the dir that must be in PYTHONPATH for the module to be imported and the modulepath relative to this directory. @@ -514,7 +522,9 @@ def split_modpath(modpath, check=True): return (dpath, rel_modpath) -def normalize_modpath(modpath, hide_init=True, hide_main=False): +def normalize_modpath( + modpath: str | PathLike, hide_init: bool = True, + hide_main: bool = False) -> str | PathLike: """ Normalizes __init__ and __main__ paths. @@ -567,8 +577,12 @@ def normalize_modpath(modpath, hide_init=True, hide_main=False): def modpath_to_modname( - modpath, hide_init=True, hide_main=False, check=True, relativeto=None -): + modpath: str, + hide_init: bool = True, + hide_main: bool = False, + check: bool = True, + relativeto: str | None = None, +) -> str: """ Determines importable name from file path diff --git a/line_profiler/autoprofile/util_static.pyi b/line_profiler/autoprofile/util_static.pyi deleted file mode 100644 index 42ccd84e..00000000 --- a/line_profiler/autoprofile/util_static.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from typing import List -from os import PathLike -from typing import Tuple -from collections.abc import Generator -from typing import Any - - -def package_modpaths(pkgpath, - with_pkg: bool = ..., - with_mod: bool = ..., - followlinks: bool = ..., - recursive: bool = ..., - with_libs: bool = ..., - check: bool = ...) -> Generator[Any, None, None]: - ... - - -def modname_to_modpath( - modname: str, - hide_init: bool = True, - hide_main: bool = False, - sys_path: None | List[str | PathLike] = None) -> str | None: - ... - - -def split_modpath(modpath: str, check: bool = True) -> Tuple[str, str]: - ... - - -def normalize_modpath(modpath: str | PathLike, - hide_init: bool = True, - hide_main: bool = False) -> str | PathLike: - ... - - -def modpath_to_modname(modpath: str, - hide_init: bool = True, - hide_main: bool = False, - check: bool = True, - relativeto: str | None = None) -> str: - ... diff --git a/line_profiler/cli_utils.py b/line_profiler/cli_utils.py index 6344b6fa..b1fe4184 100644 --- a/line_profiler/cli_utils.py +++ b/line_profiler/cli_utils.py @@ -2,12 +2,16 @@ Shared utilities between the :command:`python -m line_profiler` and :command:`kernprof` CLI tools. """ +from __future__ import annotations + import argparse import functools import os import pathlib import shutil import sys +from os import PathLike +from typing import Protocol, Sequence, TypeVar from .toml_config import ConfigSource @@ -16,9 +20,32 @@ **{k.casefold(): True for k in ('1', 'on', 'True', 'T', 'yes', 'Y')}} +P_con = TypeVar('P_con', bound='ParserLike', contravariant=True) +A_co = TypeVar('A_co', bound='ActionLike', covariant=True) + + +class ActionLike(Protocol[P_con]): + def __call__(self, parser: P_con, namespace: argparse.Namespace, + values: str | Sequence[object] | None, + option_string: str | None = None) -> None: + ... + + def format_usage(self) -> str: + ... + + +class ParserLike(Protocol[A_co]): + def add_argument(self, arg: str, /, *args: str, **kwargs: object) -> A_co: + ... + + @property + def prefix_chars(self) -> str: + ... 
+ -def add_argument(parser_like, arg, /, *args, - hide_complementary_options=True, **kwargs): +def add_argument(parser_like: ParserLike[A_co], arg: str, /, *args: str, + hide_complementary_options: bool = True, + **kwargs: object) -> A_co: """ Override the ``'store_true'`` and ``'store_false'`` actions so that they are turned into options which: @@ -126,7 +153,8 @@ def negated(*args, **kwargs): long_kwargs['help'] = f'({additional_msg})' short_kwargs['help'] = argparse.SUPPRESS - long_action = short_action = None + long_action: A_co | None = None + short_action: A_co | None = None if long_flags: long_action = parser_like.add_argument(*long_flags, **long_kwargs) short_kwargs['dest'] = long_action.dest @@ -158,7 +186,8 @@ def negated(*args, **kwargs): return action -def get_cli_config(subtable, /, *args, **kwargs): +def get_cli_config(subtable: str, /, *args: object, + **kwargs: object) -> ConfigSource: """ Get the ``tool.line_profiler.`` configs and normalize its keys (``some-key`` -> ``some_key``). @@ -181,7 +210,7 @@ def get_cli_config(subtable, /, *args, **kwargs): return config -def get_python_executable(): +def get_python_executable() -> str: """ Returns: str: command @@ -196,7 +225,7 @@ def get_python_executable(): return short_string_path(sys.executable) -def positive_float(value): +def positive_float(value: str) -> float: """ Arguments: value (str) @@ -214,7 +243,8 @@ def positive_float(value): return val -def boolean(value, *, fallback=None, invert=False): +def boolean(value: str, *, fallback: bool | None = None, + invert: bool = False) -> bool: """ Arguments: value (str) @@ -275,7 +305,7 @@ def boolean(value, *, fallback=None, invert=False): return fallback -def short_string_path(path): +def short_string_path(path: str | PathLike[str]) -> str: """ Arguments: path (str | os.PathLike[str]): diff --git a/line_profiler/cli_utils.pyi b/line_profiler/cli_utils.pyi deleted file mode 100644 index 182efe98..00000000 --- a/line_profiler/cli_utils.pyi +++ /dev/null @@ -1,60 +0,0 @@ -""" -Shared utilities between the :command:`python -m line_profiler` and -:command:`kernprof` CLI tools. -""" -import argparse -import pathlib -from os import PathLike -from typing import Protocol, Sequence, Tuple, TypeVar - -from line_profiler.toml_config import ConfigSource - - -P_con = TypeVar('P_con', bound='ParserLike', contravariant=True) -A_co = TypeVar('A_co', bound='ActionLike', covariant=True) - - -class ActionLike(Protocol[P_con]): - def __call__(self, parser: P_con, - namespace: argparse.Namespace, - values: str | Sequence | None, - option_string: str | None = None) -> None: - ... - - def format_usage(self) -> str: - ... - - -class ParserLike(Protocol[A_co]): - def add_argument(self, arg: str, /, *args: str, **kwargs) -> A_co: - ... - - @property - def prefix_chars(self) -> str: - ... - - -def add_argument(parser_like: ParserLike[A_co], arg: str, /, *args: str, - hide_complementary_options: bool = True, **kwargs) -> A_co: - ... - - -def get_cli_config(subtable: str, /, *args, **kwargs) -> ConfigSource: - ... - - -def get_python_executable() -> str: - ... - - -def positive_float(value: str) -> float: - ... - - -def boolean(value: str, *, - fallback: bool | None = None, invert: bool = False) -> bool: - ... - - -def short_string_path(path: str | PathLike[str]) -> str: - ... 
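Illustrative sketch (not part of the patch) of what the new ParserLike/ActionLike protocols in cli_utils.py are meant to capture: argparse.ArgumentParser should satisfy ParserLike[argparse.Action] structurally, so add_argument() can be used on it directly. The -r/--rich flag below is a hypothetical example option.

    import argparse
    from line_profiler.cli_utils import add_argument

    parser = argparse.ArgumentParser()
    # Per the add_argument() docstring, a 'store_true' option gains a hidden
    # complementary '--no-...' flag; the returned action is typed as A_co
    # (here an argparse.Action subclass).
    action = add_argument(parser, '-r', '--rich', action='store_true',
                          help='use rich formatting')
    print(type(action).__name__, parser.parse_args(['--rich']).rich)
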
diff --git a/line_profiler/explicit_profiler.py b/line_profiler/explicit_profiler.py index 787c4ac2..8deb4254 100644 --- a/line_profiler/explicit_profiler.py +++ b/line_profiler/explicit_profiler.py @@ -181,6 +181,9 @@ def func4(): from .line_profiler import LineProfiler from .toml_config import ConfigSource +F = TypeVar('F', bound=Callable[..., Any]) +ConfigArg = str | pathlib.PurePath | bool | None + # The first process that enables profiling records its PID here. Child processes # created via multiprocessing (spawn/forkserver) inherit this environment value, # which helps prevent helper processes from claiming ownership and clobbering @@ -291,6 +294,7 @@ def __init__(self, config: ConfigArg = None) -> None: self._profile = None self._owner_pid = None self.enabled = None + # Configs: # - How to toggle the profiler self.setup_config = config_source.conf_dict['setup'] diff --git a/line_profiler/ipython_extension.py b/line_profiler/ipython_extension.py index 17a58867..4e5c9588 100644 --- a/line_profiler/ipython_extension.py +++ b/line_profiler/ipython_extension.py @@ -32,6 +32,7 @@ .. |lprun_all| replace:: :py:data:`%%lprun_all ` .. |builtins| replace:: :py:mod:`__builtins__ ` """ +from __future__ import annotations import ast import builtins @@ -44,11 +45,10 @@ from contextlib import ExitStack from dataclasses import dataclass from pathlib import Path -from typing import TYPE_CHECKING, Union -if TYPE_CHECKING: # pragma: no cover - from typing import (Callable, ParamSpec, # noqa: F401 - Any, ClassVar, TypeVar) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar +from typing_extensions import ParamSpec +if TYPE_CHECKING: # pragma: no cover PS = ParamSpec('PS') PD = TypeVar('PD', bound='_PatchDict') DefNode = TypeVar('DefNode', ast.FunctionDef, ast.AsyncFunctionDef) @@ -104,26 +104,26 @@ class _ParseParamResult: opts: Struct arg_str: str - def __getattr__(self, attr): # type: (str) -> Any + def __getattr__(self, attr: str) -> Any: """ Defers to :py:attr:`_ParseParamResult.opts`.""" return getattr(self.opts, attr) @functools.cached_property - def dump_raw_dest(self): # type: () -> Path | None + def dump_raw_dest(self) -> Path | None: path = self.opts.D[0] if path: return Path(path) return None @functools.cached_property - def dump_text_dest(self): # type: () -> Path | None + def dump_text_dest(self) -> Path | None: path = self.opts.T[0] if path: return Path(path) return None @functools.cached_property - def output_unit(self): # type: () -> float | None + def output_unit(self) -> float | None: if self.opts.u is None: return None try: @@ -132,11 +132,11 @@ def output_unit(self): # type: () -> float | None raise TypeError("Timer unit setting must be a float.") @functools.cached_property - def strip_zero(self): # type: () -> bool + def strip_zero(self) -> bool: return "z" in self.opts @functools.cached_property - def return_profiler(self): # type: () -> bool + def return_profiler(self) -> bool: return "r" in self.opts @@ -147,9 +147,9 @@ class _RunAndProfileResult: """ stats: LineStats parse_result: _ParseParamResult - message: Union[str, None] = None - time_elapsed: Union[float, None] = None - tempfile: Union[str, 'os.PathLike[str]', None] = None + message: str | None = None + time_elapsed: float | None = None + tempfile: str | os.PathLike[str] | None = None def __post_init__(self): if self.tempfile is not None: @@ -185,7 +185,7 @@ def show_func_wrapper( line_profiler, get_code_block=get_code_block_wrapper): return call() - def get_code_block_wrapper(filename, lineno): + def 
get_code_block_wrapper(filename: str, lineno: int) -> list[str]: """ Return the entire content of :py:attr:`~.tempfile`.""" with tmp.open(mode='r') as fobj: return fobj.read().splitlines(keepends=True) @@ -193,7 +193,7 @@ def get_code_block_wrapper(filename, lineno): return show_func_wrapper @functools.cached_property - def output(self): # type: () -> str + def output(self) -> str: with ExitStack() as stack: cap = stack.enter_context(StringIO()) # Trap text output patch_show_func = _PatchDict.from_module( @@ -229,14 +229,13 @@ class _PatchProfilerIntoBuiltins: skip this doctest if :py:mod:`IPython` (and hence this module) can't be imported. """ - def __init__(self, prof=None): - # type: (LineProfiler | None) -> None + def __init__(self, prof: LineProfiler | None = None) -> None: if prof is None: prof = LineProfiler() self.prof = prof self._ctx = _PatchDict.from_module(builtins, profile=self.prof) - def __enter__(self): # type: () -> LineProfiler + def __enter__(self) -> LineProfiler: self._ctx.__enter__() return self.prof @@ -245,14 +244,14 @@ def __exit__(self, *a, **k): class _PatchDict: - def __init__(self, namespace, /, **kwargs): - # type: (dict[str, Any], Any) -> None + def __init__(self, namespace: dict[str, Any], /, + **kwargs: Any) -> None: self.namespace = namespace self.replacements = kwargs - self._stack = [] # type: list[dict[str, Any]] + self._stack: list[dict[str, Any]] = [] self._absent = object() - def __enter__(self): # type: (PD) -> PD + def __enter__(self: PD) -> PD: self._push() return self @@ -278,15 +277,16 @@ def _pop(self): namespace[key] = value @classmethod - def from_module(cls, module, /, **kwargs): - # type: (type[PD], types.ModuleType, Any) -> PD + def from_module(cls: type[PD], module: types.ModuleType, /, + **kwargs: Any) -> PD: return cls(vars(module), **kwargs) @magics_class class LineProfilerMagics(Magics): - def _parse_parameters(self, parameter_s, getopt_spec, opts_def): - # type: (str, str, Struct) -> _ParseParamResult + def _parse_parameters( + self, parameter_s: str, getopt_spec: str, + opts_def: Struct) -> _ParseParamResult: # FIXME: There is a chance that this handling will need to be # updated to handle single-quoted characters better (#382) parameter_s = parameter_s.replace('"', r"\"").replace("'", r"\"") @@ -297,13 +297,13 @@ def _parse_parameters(self, parameter_s, getopt_spec, opts_def): return _ParseParamResult(opts, arg_str) @staticmethod - def _run_and_profile(prof, # type: LineProfiler - parse_result, # type: _ParseParamResult - tempfile, # type: str | None - method, # type: Callable[PS, Any] - *args, # type: PS.args - **kwargs, # type: PS.kwargs - ): # type: (...) -> _RunAndProfileResult + def _run_and_profile( + prof: LineProfiler, + parse_result: _ParseParamResult, + tempfile: str | None, + method: Callable[PS, Any], + *args: PS.args, + **kwargs: PS.kwargs) -> _RunAndProfileResult: # Use the time module because it's easier than parsing the # output from `show_text()`. # `perf_counter()` is a monotonically increasing alternative to @@ -323,8 +323,8 @@ def _run_and_profile(prof, # type: LineProfiler message=message, time_elapsed=total_time, tempfile=tempfile) @classmethod - def _lprun_all_get_rewritten_profiled_code(cls, tmpfile): - # type: (str) -> types.CodeType + def _lprun_all_get_rewritten_profiled_code( + cls, tmpfile: str) -> types.CodeType: """ Transform and compile the AST of the profiled code. 
This is similar to :py:meth:`.LineProfiler.runctx`, """ @@ -334,15 +334,16 @@ def _lprun_all_get_rewritten_profiled_code(cls, tmpfile): return compile(tree, tmpfile, "exec") @classmethod - def _lprun_get_top_level_profiled_code(cls, tmpfile): - # type: (str) -> types.CodeType + def _lprun_get_top_level_profiled_code( + cls, tmpfile: str) -> types.CodeType: """ Compile the profiled code.""" with open(tmpfile, mode='r') as fobj: return compile(fobj.read(), tmpfile, "exec") @staticmethod - def _handle_end(prof, run_result): - # type: (LineProfiler, _RunAndProfileResult) -> LineProfiler | None + def _handle_end( + prof: LineProfiler, + run_result: _RunAndProfileResult) -> LineProfiler | None: page(run_result.output) dump_file = run_result.parse_result.dump_raw_dest @@ -363,7 +364,7 @@ def _handle_end(prof, run_result): return prof if run_result.parse_result.return_profiler else None @line_magic - def lprun(self, parameter_s=""): + def lprun(self, parameter_s: str = "") -> LineProfiler | None: """Execute a statement under the line-by-line profiler from the :py:mod:`line_profiler` module. @@ -449,7 +450,8 @@ def lprun(self, parameter_s=""): return self._handle_end(profile, run) @cell_magic - def lprun_all(self, parameter_s="", cell=""): + def lprun_all(self, parameter_s: str = "", + cell: str = "") -> LineProfiler | None: """Execute the whole notebook cell under the line-by-line profiler from the :py:mod:`line_profiler` module. diff --git a/line_profiler/ipython_extension.pyi b/line_profiler/ipython_extension.pyi deleted file mode 100644 index 8bba105b..00000000 --- a/line_profiler/ipython_extension.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from IPython.core.magic import Magics -from . import LineProfiler - - -class LineProfilerMagics(Magics): - def lprun(self, parameter_s: str = ...) -> LineProfiler | None: - ... - - def lprun_all(self, - parameter_s: str = "", - cell: str = "") -> LineProfiler | None: - ... diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index e146567e..95889dd2 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -4,7 +4,10 @@ inspect its output. This depends on the :py:mod:`line_profiler._line_profiler` Cython backend. 
""" +from __future__ import annotations + import functools +import io import inspect import linecache import operator @@ -16,6 +19,11 @@ import tokenize from argparse import ArgumentParser from datetime import datetime +from os import PathLike +from typing import (TYPE_CHECKING, Callable, Literal, Mapping, Protocol, + TypeVar, overload) +from typing_extensions import ParamSpec, Self +from functools import cached_property, partial, partialmethod try: from ._line_profiler import (LineProfiler as CLineProfiler, @@ -29,16 +37,25 @@ from .cli_utils import ( add_argument, get_cli_config, positive_float, short_string_path) from .profiler_mixin import ByCountProfilerMixin, is_c_level_callable -from .scoping_policy import ScopingPolicy +from .scoping_policy import ScopingPolicy, ScopingPolicyDict from .toml_config import ConfigSource +if TYPE_CHECKING: # pragma: no cover + from .profiler_mixin import CLevelCallable, UnparametrizedCallableLike + # NOTE: This needs to be in sync with ../kernprof.py and __init__.py __version__ = '5.0.1' +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +PS = ParamSpec('PS') + @functools.lru_cache() -def get_column_widths(config=False): +def get_column_widths( + config: bool | str | PathLike[str] | None = False +) -> Mapping[Literal['line', 'hits', 'time', 'perhit', 'percent'], int]: """ Arguments config (bool | str | pathlib.PurePath | None) @@ -53,14 +70,14 @@ def get_column_widths(config=False): return types.MappingProxyType(subconf.conf_dict) -def load_ipython_extension(ip): +def load_ipython_extension(ip: object) -> None: """ API for IPython to recognize this module as an IPython extension. """ from .ipython_extension import LineProfilerMagics ip.register_magics(LineProfilerMagics) -def get_code_block(filename, lineno): +def get_code_block(filename: os.PathLike[str] | str, lineno: int) -> list[str]: """ Get the lines in the code block in a file starting from required line number; understands Cython code. @@ -163,7 +180,8 @@ class _CythonBlockFinder(inspect.BlockFinder): is public but undocumented API. See similar caveat in :py:func:`~.get_code_block`. """ - def tokeneater(self, type, token, *args, **kwargs): + def tokeneater(self, type: int, token: str, + *args: object, **kwargs: object) -> object: if ( not self.started and type == tokenize.NAME @@ -183,17 +201,22 @@ class _WrapperInfo: profiler_id (int) ID of the `LineProfiler`. 
""" - def __init__(self, func, profiler_id): + def __init__(self, func: types.FunctionType, profiler_id: int) -> None: self.func = func self.profiler_id = profiler_id +class _StatsLike(Protocol): + timings: Mapping[tuple[str, int, str], list[tuple[int, int, int]]] + unit: float + + class LineStats(CLineStats): - def __repr__(self): + def __repr__(self) -> str: return '{}({}, {:.2G})'.format( type(self).__name__, self.timings, self.unit) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: """ Example: >>> from copy import deepcopy @@ -222,7 +245,7 @@ def __eq__(self, other): return NotImplemented return True - def __add__(self, other): + def __add__(self, other: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -246,7 +269,7 @@ def __add__(self, other): timings, unit = self._get_aggregated_timings([self, other]) return type(self)(timings, unit) - def __iadd__(self, other): + def __iadd__(self, other: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -273,17 +296,20 @@ def __iadd__(self, other): self.timings, self.unit = self._get_aggregated_timings([self, other]) return self - def print(self, stream=None, **kwargs): + def print(self, stream: io.TextIOBase | None = None, + **kwargs: object) -> None: show_text(self.timings, self.unit, stream=stream, **kwargs) - def to_file(self, filename): + def to_file(self, filename: PathLike[str] | str) -> None: """ Pickle the instance to the given filename. """ with open(filename, 'wb') as f: pickle.dump(self, f, pickle.HIGHEST_PROTOCOL) @classmethod - def from_files(cls, file, /, *files): + def from_files( + cls, file: PathLike[str] | str, /, + *files: PathLike[str] | str) -> Self: """ Utility function to load an instance from the given filenames. """ @@ -294,7 +320,9 @@ def from_files(cls, file, /, *files): return cls.from_stats_objects(*stats_objs) @classmethod - def from_stats_objects(cls, stats, /, *more_stats): + def from_stats_objects( + cls, stats: _StatsLike, /, + *more_stats: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -368,7 +396,45 @@ class LineProfiler(CLineProfiler, ByCountProfilerMixin): >>> func() >>> profile.print_stats() """ - def __call__(self, func): + @overload + def __call__(self, func: CLevelCallable) -> CLevelCallable: + ... + + @overload + def __call__(self, func: UnparametrizedCallableLike) -> UnparametrizedCallableLike: + ... + + @overload + def __call__(self, func: type[T]) -> type[T]: + ... + + @overload + def __call__(self, func: partial[T]) -> partial[T]: + ... + + @overload + def __call__(self, func: partialmethod[T]) -> partialmethod[T]: + ... + + @overload + def __call__(self, func: cached_property[T_co]) -> cached_property[T_co]: + ... + + @overload + def __call__(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: + ... + + @overload + def __call__( + self, func: classmethod[type[T], PS, T_co], + ) -> classmethod[type[T], PS, T_co]: + ... + + @overload + def __call__(self, func: Callable) -> types.FunctionType: + ... + + def __call__(self, func: object): """ Decorate a function, method, :py:class:`property`, :py:func:`~functools.partial` object etc. 
to start the profiler @@ -384,12 +450,15 @@ def __call__(self, func): self.add_callable(func) return self.wrap_callable(func) - def wrap_callable(self, func): + def wrap_callable(self, func: object): if is_c_level_callable(func): # Non-profilable return func return super().wrap_callable(func) - def add_callable(self, func, guard=None, name=None): + def add_callable( + self, func: object, + guard: Callable[[types.FunctionType], bool] | None = None, + name: str | None = None) -> Literal[0, 1]: """ Register a function, method, :py:class:`property`, :py:func:`~functools.partial` object, etc. with the underlying @@ -461,18 +530,21 @@ def _debug(self, msg): msg = f'{self_repr}: {msg}' logger.debug(msg) - def get_stats(self): + def get_stats(self) -> LineStats: return LineStats.from_stats_objects(super().get_stats()) - def dump_stats(self, filename): + def dump_stats(self, filename: os.PathLike[str] | str) -> None: """ Dump a representation of the data to a file as a pickled :py:class:`~.LineStats` object from :py:meth:`~.get_stats()`. """ self.get_stats().to_file(filename) - def print_stats(self, stream=None, output_unit=None, stripzeros=False, - details=True, summarize=False, sort=False, rich=False, *, - config=None): + def print_stats( + self, stream: io.TextIOBase | None = None, + output_unit: float | None = None, stripzeros: bool = False, + details: bool = True, summarize: bool = False, + sort: bool = False, rich: bool = False, *, + config: str | PathLike[str] | bool | None = None) -> None: """ Show the gathered statistics. """ self.get_stats().print( @@ -546,7 +618,10 @@ def func_guard(func): self._repr_for_log(namespace, name))) return count - def add_class(self, cls, *, scoping_policy=None, wrap=False): + def add_class( + self, cls: type, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> int: """ Add the members (callables (wrappers), methods, classes, ...) in a class' local namespace and profile them. @@ -591,7 +666,10 @@ def add_class(self, cls, *, scoping_policy=None, wrap=False): module_scoping_policy=policies['module'], wrap=wrap) - def add_module(self, mod, *, scoping_policy=None, wrap=False): + def add_module( + self, mod: types.ModuleType, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> int: """ Add the members (callables (wrappers), methods, classes, ...) in a module's local namespace and profile them. @@ -658,7 +736,7 @@ def _mark_wrapper(self, wrapper): # This could be in the ipython_extension submodule, # but it doesn't depend on the IPython module so it's easier to just let it stay here. -def is_generated_code(filename): +def is_generated_code(filename: str) -> bool: """ Return True if a filename corresponds to generated code, such as a Jupyter Notebook cell. """ @@ -672,10 +750,13 @@ def is_generated_code(filename): ) -def show_func(filename, start_lineno, func_name, timings, unit, - output_unit=None, stream=None, stripzeros=False, rich=False, +def show_func(filename: str, start_lineno: int, func_name: str, + timings: list[tuple[int, int, float]], unit: float, + output_unit: float | None = None, + stream: io.TextIOBase | None = None, + stripzeros: bool = False, rich: bool = False, *, - config=None): + config: str | PathLike[str] | bool | None = None) -> None: """ Show results for a single function. 
@@ -895,9 +976,12 @@ def show_func(filename, start_lineno, func_name, timings, unit, stream.write('\n') -def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, - details=True, summarize=False, sort=False, rich=False, *, - config=None): +def show_text(stats: _StatsLike, unit: float, + output_unit: float | None = None, + stream: io.TextIOBase | None = None, + stripzeros: bool = False, details: bool = True, + summarize: bool = False, sort: bool = False, rich: bool = False, + *, config: str | PathLike[str] | bool | None = None) -> None: """ Show text for the given timings. @@ -967,7 +1051,7 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, load_stats = LineStats.from_files -def main(): +def main() -> None: """ The line profiler CLI to view output from :command:`kernprof -l`. """ diff --git a/line_profiler/line_profiler_utils.py b/line_profiler/line_profiler_utils.py index 0d903888..fcc6c2cf 100644 --- a/line_profiler/line_profiler_utils.py +++ b/line_profiler/line_profiler_utils.py @@ -1,7 +1,10 @@ """ Miscellaneous utilities that :py:mod:`line_profiler` uses. """ +from __future__ import annotations + import enum +from typing_extensions import Self class _StrEnumBase(str, enum.Enum): @@ -28,13 +31,13 @@ class _StrEnumBase(str, enum.Enum): ValueError: 'baz' is not a valid MyEnum """ @staticmethod - def _generate_next_value_(name, *_, **__): + def _generate_next_value_(name: str, *_, **__) -> str: return name.lower() - def __eq__(self, other): + def __eq__(self, other: object) -> bool: return self.value == other - def __str__(self): + def __str__(self) -> str: return self.value @@ -65,7 +68,7 @@ class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): 'bar' """ @classmethod - def _missing_(cls, value): + def _missing_(cls, value: object) -> Self | None: if not isinstance(value, str): return None members = {name.casefold(): instance diff --git a/line_profiler/line_profiler_utils.pyi b/line_profiler/line_profiler_utils.pyi deleted file mode 100644 index a510cf94..00000000 --- a/line_profiler/line_profiler_utils.pyi +++ /dev/null @@ -1,26 +0,0 @@ -import enum -try: - from typing import Self # type: ignore[attr-defined] # noqa: F401 -except ImportError: # Python < 3.11 - from typing_extensions import Self # noqa: F401 - - -# Note: `mypy` tries to read this class as a free-standing enum -# (instead of an `enum.Enum` subclass that string enums are to inherit -# from), and complains that it has no members -- so silence that - - -class StringEnum(str, enum.Enum): # type: ignore[misc] - @staticmethod - def _generate_next_value_(name: str, *_, **__) -> str: - ... - - def __eq__(self, other) -> bool: - ... - - def __str__(self) -> str: - ... - - @classmethod - def _missing_(cls, value) -> Self | None: - ... 
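A minimal sketch (assumed reading, not part of the patch) of what the @overload stack added to LineProfiler.__call__ above expresses: decorating a plain function or a descriptor such as functools.cached_property should round-trip its type for static checkers while profiling normally at runtime.

    from functools import cached_property
    from line_profiler import LineProfiler

    profile = LineProfiler()

    @profile            # UnparametrizedCallableLike overload: function in, function out
    def compute(x: int) -> int:
        return x * 2

    class Widget:
        @profile        # cached_property[T_co] overload: descriptor type preserved
        @cached_property
        def size(self) -> int:
            return compute(21)

    # Runtime behaviour is unchanged; the overloads only refine what a
    # checker infers for `compute` and `Widget().size`.
    print(compute(2), Widget().size)
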
diff --git a/line_profiler/profiler_mixin.py b/line_profiler/profiler_mixin.py index 3bc98c1d..d5602bbf 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -1,8 +1,15 @@ +from __future__ import annotations + import functools import inspect import types +from functools import cached_property, partial, partialmethod from sys import version_info +from typing import (TYPE_CHECKING, Any, Callable, Mapping, Protocol, TypeVar, + overload) +from typing_extensions import ParamSpec, TypeIs from warnings import warn +from ._line_profiler import label from .scoping_policy import ScopingPolicy @@ -26,8 +33,108 @@ # https://cython.readthedocs.io/en/latest/src/tutorial/profiling_tutorial.html _CANNOT_LINE_TRACE_CYTHON = (3, 12) <= version_info < (3, 13, 0, 'beta', 1) +UnparametrizedCallableLike = TypeVar( + 'UnparametrizedCallableLike', + types.FunctionType, property, types.MethodType) +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +PS = ParamSpec('PS') + +if TYPE_CHECKING: + class CythonCallable(Protocol[PS, T_co]): + def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: + ... + + @property + def __code__(self) -> types.CodeType: + ... + + @property + def func_code(self) -> types.CodeType: + ... + + @property + def __name__(self) -> str: + ... + + @property + def func_name(self) -> str: + ... + + @property + def __qualname__(self) -> str: + ... + + @property + def __doc__(self) -> str | None: + ... + + @__doc__.setter + def __doc__(self, doc: str | None) -> None: + ... + + @property + def func_doc(self) -> str | None: + ... + + @property + def __globals__(self) -> dict[str, Any]: + ... + + @property + def func_globals(self) -> dict[str, Any]: + ... + + @property + def __dict__(self) -> dict[str, Any]: + ... + + @__dict__.setter + def __dict__(self, dict: dict[str, Any]) -> None: + ... + + @property + def func_dict(self) -> dict[str, Any]: + ... -def is_c_level_callable(func): + @property + def __annotations__(self) -> dict[str, Any]: + ... + + @__annotations__.setter + def __annotations__(self, annotations: dict[str, Any]) -> None: + ... + + @property + def __defaults__(self): + ... + + @property + def func_defaults(self): + ... + + @property + def __kwdefaults__(self): + ... + + @property + def __closure__(self): + ... + + @property + def func_closure(self): + ... 
+else: + CythonCallable = type(label) + +CLevelCallable = TypeVar( + 'CLevelCallable', + types.BuiltinFunctionType, types.BuiltinMethodType, + types.ClassMethodDescriptorType, types.MethodDescriptorType, + types.MethodWrapperType, types.WrapperDescriptorType) + + +def is_c_level_callable(func: Any) -> TypeIs[CLevelCallable]: """ Returns: func_is_c_level (bool): @@ -37,7 +144,7 @@ def is_c_level_callable(func): return isinstance(func, C_LEVEL_CALLABLE_TYPES) -def is_cython_callable(func): +def is_cython_callable(func: Any) -> TypeIs[CythonCallable]: if not callable(func): return False # Note: don't directly check against a Cython function type, since @@ -48,31 +155,31 @@ def is_cython_callable(func): in ('cython_function_or_method', 'fused_cython_function')) -def is_classmethod(f): +def is_classmethod(f: Any) -> TypeIs[classmethod]: return isinstance(f, classmethod) -def is_staticmethod(f): +def is_staticmethod(f: Any) -> TypeIs[staticmethod]: return isinstance(f, staticmethod) -def is_boundmethod(f): +def is_boundmethod(f: Any) -> TypeIs[types.MethodType]: return isinstance(f, types.MethodType) -def is_partialmethod(f): +def is_partialmethod(f: Any) -> TypeIs[partialmethod]: return isinstance(f, functools.partialmethod) -def is_partial(f): +def is_partial(f: Any) -> TypeIs[partial]: return isinstance(f, functools.partial) -def is_property(f): +def is_property(f: Any) -> TypeIs[property]: return isinstance(f, property) -def is_cached_property(f): +def is_cached_property(f: Any) -> TypeIs[cached_property]: return isinstance(f, functools.cached_property) @@ -86,7 +193,37 @@ class ByCountProfilerMixin: Used by :py:class:`line_profiler.line_profiler.LineProfiler` and :py:class:`kernprof.ContextualProfile`. """ - def wrap_callable(self, func): + @overload + def wrap_callable(self, func: CLevelCallable) -> CLevelCallable: + ... + + @overload + def wrap_callable( + self, func: UnparametrizedCallableLike, + ) -> UnparametrizedCallableLike: + ... + + @overload + def wrap_callable(self, func: type[T]) -> type[T]: + ... + + @overload + def wrap_callable(self, func: partial[T]) -> partial[T]: + ... + + @overload + def wrap_callable(self, func: partialmethod[T]) -> partialmethod[T]: + ... + + @overload + def wrap_callable(self, func: cached_property[T_co]) -> cached_property[T_co]: + ... + + @overload + def wrap_callable(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: + ... + + def wrap_callable(self, func: object): """ Decorate a function to start the profiler on function entry and stop it on function exit. @@ -119,7 +256,8 @@ def wrap_callable(self, func): 'callable wrapper') @classmethod - def get_underlying_functions(cls, func): + def get_underlying_functions( + cls, func: object) -> list[types.FunctionType]: """ Get the underlying function objects of a callable or an adjacent object. 
@@ -130,7 +268,9 @@ def get_underlying_functions(cls, func): return cls._get_underlying_functions(func) @classmethod - def _get_underlying_functions(cls, func, seen=None, stop_at_classes=False): + def _get_underlying_functions( + cls, func: object, seen: set[int] | None = None, + stop_at_classes: bool = False) -> list[types.FunctionType | type]: if seen is None: seen = set() kwargs = {'seen': seen, 'stop_at_classes': stop_at_classes} @@ -169,7 +309,8 @@ def _get_underlying_functions(cls, func, seen=None, stop_at_classes=False): @classmethod def _get_underlying_functions_from_property( - cls, prop, seen, stop_at_classes): + cls, prop: property, seen: set[int], + stop_at_classes: bool) -> list[types.FunctionType | type]: result = [] for impl in prop.fget, prop.fset, prop.fdel: if impl is not None: @@ -178,7 +319,9 @@ def _get_underlying_functions_from_property( return result @classmethod - def _get_underlying_functions_from_type(cls, kls, seen, stop_at_classes): + def _get_underlying_functions_from_type( + cls, kls: type, seen: set[int], + stop_at_classes: bool) -> list[types.FunctionType | type]: result = [] get_filter = cls._class_scoping_policy.get_filter func_check = get_filter(kls, 'func') diff --git a/line_profiler/profiler_mixin.pyi b/line_profiler/profiler_mixin.pyi deleted file mode 100644 index ba7a9d3a..00000000 --- a/line_profiler/profiler_mixin.pyi +++ /dev/null @@ -1,271 +0,0 @@ -from functools import cached_property, partial, partialmethod -from types import (CodeType, FunctionType, MethodType, - BuiltinFunctionType, BuiltinMethodType, - ClassMethodDescriptorType, MethodDescriptorType, - MethodWrapperType, WrapperDescriptorType) -from typing import (TYPE_CHECKING, overload, - Any, Callable, Mapping, Protocol, TypeVar) -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - ParamSpec) -except ImportError: # Python < 3.10 - from typing_extensions import ParamSpec # noqa: F401 -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - Self) -except ImportError: # Python < 3.11 - from typing_extensions import Self # noqa: F401 -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - TypeIs) -except ImportError: # Python < 3.13 - from typing_extensions import TypeIs # noqa: F401 -from ._line_profiler import label - - -UnparametrizedCallableLike = TypeVar('UnparametrizedCallableLike', - FunctionType, property, MethodType) -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') - -if TYPE_CHECKING: - class CythonCallable(Protocol[PS, T_co]): - def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: - ... - - @property - def __code__(self) -> CodeType: - ... - - @property - def func_code(self) -> CodeType: - ... - - @property - def __name__(self) -> str: - ... - - @property - def func_name(self) -> str: - ... - - @property - def __qualname__(self) -> str: - ... - - @property - def __doc__(self) -> str | None: - ... - - @__doc__.setter - def __doc__(self, doc: str | None) -> None: - ... - - @property - def func_doc(self) -> str | None: - ... - - @property - def __globals__(self) -> dict[str, Any]: - ... - - @property - def func_globals(self) -> dict[str, Any]: - ... - - @property - def __dict__(self) -> dict[str, Any]: - ... - - @__dict__.setter - def __dict__(self, dict: dict[str, Any]) -> None: - ... - - @property - def func_dict(self) -> dict[str, Any]: - ... - - @property - def __annotations__(self) -> dict[str, Any]: - ... 
- - @__annotations__.setter - def __annotations__(self, annotations: dict[str, Any]) -> None: - ... - - @property - def __defaults__(self): - ... - - @property - def func_defaults(self): - ... - - @property - def __kwdefaults__(self): - ... - - @property - def __closure__(self): - ... - - @property - def func_closure(self): - ... - - -else: - CythonCallable = type(label) - -CLevelCallable = TypeVar('CLevelCallable', - BuiltinFunctionType, BuiltinMethodType, - ClassMethodDescriptorType, MethodDescriptorType, - MethodWrapperType, WrapperDescriptorType) - - -def is_c_level_callable(func: Any) -> TypeIs[CLevelCallable]: - ... - - -def is_cython_callable(func: Any) -> TypeIs[CythonCallable]: - ... - - -def is_classmethod(f: Any) -> TypeIs[classmethod]: - ... - - -def is_staticmethod(f: Any) -> TypeIs[staticmethod]: - ... - - -def is_boundmethod(f: Any) -> TypeIs[MethodType]: - ... - - -def is_partialmethod(f: Any) -> TypeIs[partialmethod]: - ... - - -def is_partial(f: Any) -> TypeIs[partial]: - ... - - -def is_property(f: Any) -> TypeIs[property]: - ... - - -def is_cached_property(f: Any) -> TypeIs[cached_property]: - ... - - -class ByCountProfilerMixin: - def get_underlying_functions(self, func) -> list[FunctionType]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def wrap_callable( # type: ignore[overload-overlap] - self, func: UnparametrizedCallableLike, - ) -> UnparametrizedCallableLike: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: type[T]) -> type[T]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: partial[T]) -> partial[T]: - ... - - @overload - def wrap_callable(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def wrap_callable(self, - func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - @overload - def wrap_callable( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - # Fallback: just return a wrapper function around a generic callable - - @overload - def wrap_callable(self, func: Callable) -> FunctionType: - ... - - def wrap_classmethod( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - def wrap_staticmethod( - self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - def wrap_boundmethod(self, func: MethodType) -> MethodType: - ... - - def wrap_partialmethod(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - def wrap_partial(self, func: partial[T]) -> partial[T]: - ... - - def wrap_property(self, func: property) -> property: - ... - - def wrap_cached_property( - self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - def wrap_async_generator(self, func: FunctionType) -> FunctionType: - ... - - def wrap_coroutine(self, func: FunctionType) -> FunctionType: - ... - - def wrap_generator(self, func: FunctionType) -> FunctionType: - ... - - def wrap_function(self, func: Callable) -> FunctionType: - ... - - def wrap_class(self, func: type[T]) -> type[T]: - ... - - def run(self, cmd: str) -> Self: - ... - - def runctx(self, - cmd: str, - globals: dict[str, Any] | None, - locals: Mapping[str, Any] | None) -> Self: - ... - - def runcall(self, func: Callable[PS, T], /, - *args: PS.args, **kw: PS.kwargs) -> T: - ... 
- - def __enter__(self) -> Self: - ... - - def __exit__(self, *_, **__) -> None: - ... diff --git a/line_profiler/scoping_policy.py b/line_profiler/scoping_policy.py index cedf51e9..0295bbb8 100644 --- a/line_profiler/scoping_policy.py +++ b/line_profiler/scoping_policy.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from enum import auto -from types import MappingProxyType, ModuleType -from typing import Union, TypedDict +from types import FunctionType, MappingProxyType, ModuleType +from typing import Callable, Literal, TypedDict, overload from .line_profiler_utils import StringEnum @@ -97,7 +99,7 @@ class ScopingPolicy(StringEnum): # Verification - def __init_subclass__(cls, *args, **kwargs): + def __init_subclass__(cls, *args: object, **kwargs: object) -> None: """ Call :py:meth:`_check_class`. """ @@ -105,7 +107,7 @@ def __init_subclass__(cls, *args, **kwargs): cls._check_class() @classmethod - def _check_class(cls): + def _check_class(cls) -> None: """ Verify that :py:meth:`.get_filter` return a callable for all policy values and object types. @@ -122,7 +124,25 @@ class MockClass: # Filtering - def get_filter(self, namespace, obj_type): + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['func']) -> Callable[[FunctionType], bool]: + ... + + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['class']) -> Callable[[type], bool]: + ... + + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['module']) -> Callable[[ModuleType], bool]: + ... + + def get_filter(self, namespace: type | ModuleType, obj_type: str): """ Args: namespace (Union[type, types.ModuleType]): @@ -157,7 +177,10 @@ def get_filter(self, namespace, obj_type): return method(namespace, is_class=(obj_type == 'class')) @classmethod - def to_policies(cls, policies=None): + def to_policies( + cls, + policies: str | ScopingPolicy | ScopingPolicyDict | None = None + ) -> _ScopingPolicyDict: """ Normalize ``policies`` into a dictionary of policies for various object types. 
@@ -222,26 +245,28 @@ def to_policies(cls, policies=None): 'module': cls(policies['module'])}) @staticmethod - def _return_const(value): + def _return_const(value: bool) -> Callable[[object], bool]: def return_const(*_, **__): return value return return_const @staticmethod - def _match_prefix(s, prefix, sep='.'): + def _match_prefix(s: str, prefix: str, sep: str = '.') -> bool: return s == prefix or s.startswith(prefix + sep) - def _get_callable_filter_in_class(self, cls, is_class): - def func_is_child(other): + def _get_callable_filter_in_class( + self, cls: type, is_class: bool + ) -> Callable[[FunctionType | type], bool]: + def func_is_child(other: FunctionType): if not modules_are_equal(other): return False return other.__qualname__ == f'{cls.__qualname__}.{other.__name__}' - def modules_are_equal(other): # = sibling check + def modules_are_equal(other: FunctionType | type): # = sibling check return cls.__module__ == other.__module__ - def func_is_descdendant(other): + def func_is_descdendant(other: FunctionType): if not modules_are_equal(other): return False return other.__qualname__.startswith(cls.__qualname__ + '.') @@ -254,14 +279,16 @@ def func_is_descdendant(other): 'siblings': modules_are_equal, 'none': self._return_const(True)}[self.value] - def _get_callable_filter_in_module(self, mod, is_class): - def func_is_child(other): + def _get_callable_filter_in_module( + self, mod: ModuleType, is_class: bool + ) -> Callable[[FunctionType | type], bool]: + def func_is_child(other: FunctionType): return other.__module__ == mod.__name__ - def func_is_descdendant(other): + def func_is_descdendant(other: FunctionType): return self._match_prefix(other.__module__, mod.__name__) - def func_is_cousin(other): + def func_is_cousin(other: FunctionType): if func_is_descdendant(other): return True return self._match_prefix(other.__module__, parent) @@ -277,14 +304,16 @@ def func_is_cousin(other): func_is_descdendant), 'none': self._return_const(True)}[self.value] - def _get_module_filter_in_module(self, mod): - def module_is_descendant(other): + def _get_module_filter_in_module( + self, mod: ModuleType + ) -> Callable[[ModuleType], bool]: + def module_is_descendant(other: ModuleType): return other.__name__.startswith(mod.__name__ + '.') - def module_is_child(other): + def module_is_child(other: ModuleType): return other.__name__.rpartition('.')[0] == mod.__name__ - def module_is_sibling(other): + def module_is_sibling(other: ModuleType): return other.__name__.startswith(parent + '.') parent, _, basename = mod.__name__.rpartition('.') @@ -301,11 +330,13 @@ def module_is_sibling(other): # the corresponding methods ScopingPolicy._check_class() -ScopingPolicyDict = TypedDict('ScopingPolicyDict', - {'func': Union[str, ScopingPolicy], - 'class': Union[str, ScopingPolicy], - 'module': Union[str, ScopingPolicy]}) -_ScopingPolicyDict = TypedDict('_ScopingPolicyDict', - {'func': ScopingPolicy, - 'class': ScopingPolicy, - 'module': ScopingPolicy}) +ScopingPolicyDict = TypedDict( + 'ScopingPolicyDict', + {'func': str | ScopingPolicy, + 'class': str | ScopingPolicy, + 'module': str | ScopingPolicy}) +_ScopingPolicyDict = TypedDict( + '_ScopingPolicyDict', + {'func': ScopingPolicy, + 'class': ScopingPolicy, + 'module': ScopingPolicy}) diff --git a/line_profiler/scoping_policy.pyi b/line_profiler/scoping_policy.pyi deleted file mode 100644 index e6987289..00000000 --- a/line_profiler/scoping_policy.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from enum import auto -from types import FunctionType, ModuleType -from typing 
import overload, Literal, Callable, TypedDict -from .line_profiler_utils import StringEnum - - -class ScopingPolicy(StringEnum): - EXACT = auto() - CHILDREN = auto() - DESCENDANTS = auto() - SIBLINGS = auto() - NONE = auto() - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['func']) -> Callable[[FunctionType], bool]: - ... - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['class']) -> Callable[[type], bool]: - ... - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['module']) -> Callable[[ModuleType], bool]: - ... - - @classmethod - def to_policies( - cls, - policies: (str | 'ScopingPolicy' | 'ScopingPolicyDict' - | None) = None) -> '_ScopingPolicyDict': - ... - - -ScopingPolicyDict = TypedDict('ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) -_ScopingPolicyDict = TypedDict('_ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index 06d0870c..71370b07 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -2,6 +2,8 @@ Read and resolve user-supplied TOML files and combine them with the default to generate configurations. """ +from __future__ import annotations + import copy import dataclasses import functools @@ -14,7 +16,9 @@ except ImportError: # Python < 3.11 import tomli as tomllib # type: ignore[no-redef] # noqa: F811 from collections.abc import Mapping -from typing import Dict, List, Any +from os import PathLike +from typing import Any, Mapping as TypingMapping, Sequence, TypeVar +from typing_extensions import Self __all__ = ['ConfigSource'] @@ -23,7 +27,7 @@ TARGETS = 'line_profiler.toml', 'pyproject.toml' ENV_VAR = 'LINE_PROFILER_RC' -_DEFAULTS = None +_DEFAULTS: ConfigSource | None = None @dataclasses.dataclass @@ -45,11 +49,11 @@ class ConfigSource: :py:attr:`~.ConfigSource.path` :py:attr:`~.ConfigSource.conf_dict` can be found. """ - conf_dict: Dict[str, Any] + conf_dict: dict[str, Any] path: pathlib.Path - subtable: List[str] + subtable: list[str] - def copy(self): + def copy(self) -> Self: """ Returns: Copy of the object. @@ -57,7 +61,8 @@ def copy(self): return type(self)( copy.deepcopy(self.conf_dict), self.path, self.subtable.copy()) - def get_subconfig(self, *headers, allow_absence=False, copy=False): + def get_subconfig(self, *headers: str, allow_absence: bool = False, + copy: bool = False) -> Self: """ Arguments: headers (str): @@ -93,7 +98,7 @@ def get_subconfig(self, *headers, allow_absence=False, copy=False): return type(self)(new_dict, self.path, new_subtable) @classmethod - def from_default(cls, *, copy=True): + def from_default(cls, *, copy: bool = True) -> Self: """ Get the default TOML configuration that ships with the package. @@ -130,7 +135,8 @@ def find_file(anc, *chunks): return _DEFAULTS.copy() @classmethod - def from_config(cls, config=None, *, read_env=True): + def from_config(cls, config: str | PathLike | bool | None = None, *, + read_env: bool = True) -> Self: """ Create an instance by loading from a config file. @@ -187,7 +193,7 @@ def from_config(cls, config=None, *, read_env=True): configuration (see :py:meth:`~.ConfigSource.from_default`). 
""" - def merge(template, supplied): + def merge(template: dict[str, Any], supplied: dict[str, Any]): if not (isinstance(template, dict) and isinstance(supplied, dict)): return supplied result = {} @@ -257,8 +263,16 @@ def merge(template, supplied): merge(default_instance.conf_dict, conf), source, list(NAMESPACE)) +Config = tuple[dict[str, dict[str, Any]], pathlib.Path] +K = TypeVar('K') +V = TypeVar('V') +NestedTable = TypingMapping[K, 'NestedTable[K, V]' | V] + + def find_and_read_config_file( - *, config=None, env_var=ENV_VAR, targets=TARGETS): + *, config: str | PathLike | None = None, + env_var: str | None = ENV_VAR, + targets: Sequence[str | PathLike] = TARGETS) -> Config | None: """ Arguments: config (str | os.PathLike[str] | None): @@ -308,7 +322,8 @@ def iter_configs(dir_path): return None -def get_subtable(table, keys, *, allow_absence=True): +def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, + allow_absence: bool = True) -> NestedTable[K, V]: """ Arguments: table (Mapping): @@ -354,7 +369,8 @@ def get_subtable(table, keys, *, allow_absence=True): return subtable -def get_headers(table, *, include_implied=False): +def get_headers(table: NestedTable[K, Any], *, + include_implied: bool = False) -> set[tuple[K, ...]]: """ Arguments: table (Mapping): diff --git a/line_profiler/toml_config.pyi b/line_profiler/toml_config.pyi deleted file mode 100644 index 93409341..00000000 --- a/line_profiler/toml_config.pyi +++ /dev/null @@ -1,54 +0,0 @@ -from dataclasses import dataclass -from os import PathLike -from pathlib import Path -from typing import Mapping, Sequence, Any, Self, TypeVar - - -TARGETS = 'line_profiler.toml', 'pyproject.toml' -ENV_VAR = 'LINE_PROFILER_RC' - -K = TypeVar('K') -V = TypeVar('V') -Config = tuple[dict[str, dict[str, Any]], Path] -NestedTable = Mapping[K, 'NestedTable[K, V]' | V] - - -@dataclass -class ConfigSource: - conf_dict: dict[str, Any] - path: Path - subtable: list[str] - - def copy(self) -> Self: - ... - - def get_subconfig(self, *headers: str, - allow_absence: bool = False, copy: bool = False) -> Self: - ... - - @classmethod - def from_default(cls, *, copy: bool = True) -> Self: - ... - - @classmethod - def from_config(cls, config: str | PathLike | bool | None = None, *, - read_env: bool = True) -> Self: - ... - - -def find_and_read_config_file( - *, - config: str | PathLike | None = None, - env_var: str | None = ENV_VAR, - targets: Sequence[str | PathLike] = TARGETS) -> Config: - ... - - -def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, - allow_absence: bool = True) -> NestedTable[K, V]: - ... - - -def get_headers(table: NestedTable[K, Any], *, - include_implied: bool = False) -> set[tuple[K, ...]]: - ... diff --git a/requirements/runtime.txt b/requirements/runtime.txt index fe0df55e..85d1f5c2 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1 +1,2 @@ tomli; python_version < '3.11' +typing_extensions