Skip to content

Commit 460f580

Browse files
fix: patch sphinx_github_style lexer IndexError on last token
The GitHubLexer.get_tokens_unprocessed method accesses tokens[idx+1] without bounds checking, causing IndexError when a builtin class or Name token is the last in the list. This is a bug in sphinx-github-style 1.2.2 (latest). Added a monkey-patch in conf.py to add the missing bounds check.
1 parent 80c1d09 commit 460f580

File tree

1 file changed

+29
-0
lines changed

1 file changed

+29
-0
lines changed

docs/conf.py

Lines changed: 29 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -7,6 +7,35 @@
77

88
# Put the repo's src/ directory on sys.path — presumably so Sphinx autodoc can
# import the local package without installing it; verify against the docs build.
sys.path.insert(0, str(Path(__file__).parent / "src"))
99

10+
# --- Monkey-patch sphinx_github_style lexer (upstream bug: IndexError on last token) -----------
11+
import sphinx_github_style.lexer as _lexer # noqa: E402
12+
from pygments.token import Name, Keyword # noqa: E402
13+
from pygments.lexers.python import PythonLexer as _PythonLexer # noqa: E402
14+
15+
16+
def _patched_get_tokens_unprocessed(self, text):  # type: ignore[no-untyped-def]
    """Re-tag Python tokens for GitHub-style highlighting, with bounds checking.

    Replacement for ``GitHubLexer.get_tokens_unprocessed`` (sphinx-github-style
    1.2.2), which indexed ``tokens[idx + 1]`` without checking that a next token
    exists and raised ``IndexError`` when a builtin class or plain ``Name`` was
    the final token. Token re-tagging is unchanged:

    * builtin class names followed by ``(`` stay ``Name.Builtin``, otherwise
      become ``Name.Builtin.Pseudo``;
    * capitalized ``Name`` tokens become ``Name.Class``;
    * other ``Name`` tokens followed by ``(`` become ``Keyword.Pseudo``;
    * everything else passes through untouched.
    """
    tokens = list(_PythonLexer.get_tokens_unprocessed(self, text))
    total = len(tokens)

    def followed_by_paren(pos):  # type: ignore[no-untyped-def]
        # Safe lookahead: False when pos is the last token (the upstream bug).
        return pos + 1 < total and tokens[pos + 1][-1] == "("

    for pos, (index, token, value) in enumerate(tokens):
        if token is Name.Builtin and value in _lexer.BUILTINS["classes"]:
            # A call like `int(...)` keeps the builtin tag; a bare reference
            # is downgraded so it renders like GitHub does.
            tag = Name.Builtin if followed_by_paren(pos) else Name.Builtin.Pseudo
            yield index, tag, value
        elif token is Name:
            if value[0].isupper():
                yield index, Name.Class, value
            elif followed_by_paren(pos):
                yield index, Keyword.Pseudo, value
            else:
                yield index, Name, value
        else:
            yield index, token, value
34+
35+
36+
# Install the patched method on the upstream class so every GitHubLexer
# instance Sphinx creates uses the bounds-checked version.
_lexer.GitHubLexer.get_tokens_unprocessed = _patched_get_tokens_unprocessed
37+
# --- End monkey-patch --------------------------------------------------------------------------
38+
1039
nitpicky = True
1140
project = "python-erc7730"
1241
copyright = "2024, Ledger"

0 commit comments

Comments (0)