-from collections.abc import Generator, Iterable, Iterator
+from collections.abc import Callable, Generator, Iterable, Iterator
 from typing import Any
 
 from pygments.filter import Filter
 from pygments.lexer import Lexer
 from pygments.token import _TokenType
 
-def find_filter_class(filtername): ...
-def get_filter_by_name(filtername, **options): ...
+def find_filter_class(filtername: str) -> type[Filter]: ...
+def get_filter_by_name(filtername: str, **options: Any) -> Filter: ...
 def get_all_filters() -> Generator[str, None, None]: ...
 
 class CodeTagFilter(Filter):
     tag_re: Any
-    def __init__(self, **options) -> None: ...
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class SymbolFilter(Filter):
-    latex_symbols: Any
-    isabelle_symbols: Any
-    lang_map: Any
-    symbols: Any
-    def __init__(self, **options) -> None: ...
+    latex_symbols: dict[str, str]
+    isabelle_symbols: dict[str, str]
+    lang_map: dict[str, dict[str, str]]
+    symbols: dict[str, str]
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class KeywordCaseFilter(Filter):
-    convert: Any
-    def __init__(self, **options) -> None: ...
+    convert: Callable[[str], str]
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class NameHighlightFilter(Filter):
-    names: Any
-    tokentype: Any
-    def __init__(self, **options) -> None: ...
+    names: set[str]
+    tokentype: _TokenType
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class ErrorToken(Exception): ...
 
 class RaiseOnErrorTokenFilter(Filter):
-    exception: Any
-    def __init__(self, **options) -> None: ...
+    exception: type[Exception]
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class VisibleWhitespaceFilter(Filter):
-    wstt: Any
-    def __init__(self, **options) -> None: ...
+    wstt: bool
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class GobbleFilter(Filter):
-    n: Any
-    def __init__(self, **options) -> None: ...
-    def gobble(self, value, left): ...
+    n: int
+    def __init__(self, **options: Any) -> None: ...
+    def gobble(self, value: str, left: int) -> tuple[str, int]: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class TokenMergeFilter(Filter):
-    def __init__(self, **options) -> None: ...
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
-FILTERS: Any
+FILTERS: dict[str, type[Filter]]
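As a quick check of what the new annotations buy, here is a minimal usage sketch. It is not part of the commit; the filter names "keywordcase" and "codetagify" are assumed to be entries in Pygments' built-in FILTERS registry, and the inferred types shown in comments are what a checker such as mypy would report from these stubs.

    from pygments.filter import Filter
    from pygments.filters import find_filter_class, get_filter_by_name

    # Previously both calls returned Any; with the annotated stubs a type
    # checker now infers concrete types (filter names assumed from the
    # built-in registry).
    flt = get_filter_by_name("keywordcase", case="upper")  # inferred: Filter
    cls = find_filter_class("codetagify")                  # inferred: type[Filter]

    assert isinstance(flt, Filter)
    assert issubclass(cls, Filter)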