@@ -19,6 +19,9 @@
 
 import os
 import sys
+from pygments.lexer import RegexLexer, words
+from pygments import token
+from sphinx.highlighting import lexers
 
 dir_path = os.path.dirname(os.path.realpath(__file__))
 sys.path.append(dir_path)
@@ -28,6 +31,35 @@
 author = 'Raphael Amiard'
 
 
+class LKQLPygmentsLexer(RegexLexer):
+    """
+    Pygments lexer for LKQL
+    """
+    name = 'LKQL'
+    filenames = ['*.lkql']
+
+    tokens = {
+        'root': [
+            (words(('select', 'let', 'when', 'val', 'fun', 'selector',
+                    'match', 'rec', 'skip', 'is', 'in', 'true', 'false',
+                    'if', 'else', 'then', 'not', 'null'),
+                   prefix=r'\b', suffix=r'\b'),
+             token.Keyword),
+            (r"#(.?)+", token.Comment),
+            (r"(\-\>|=|\=\>|\<\=|\>\=|\=|\!\=|\+|\-|\*|\/|\&|"
+             r"\@|\||\>|\<)", token.Operator),
+            (r"\b(and|or|not)\b", token.Operator),
+            (r"\{|\}|\(|\)|\[|\]|;|\.|,", token.Punctuation),
+            (r"\"[^\"]*\"", token.String),
+            (r'[0-9]+', token.Number),
+            (r'_?[a-zA-Z][\w\']*', token.Name),
+            (r'\n', token.Text),
+            (r'[^\S\n]+', token.Text),
+        ]
+    }
+
+lexers['lkql'] = LKQLPygmentsLexer()
+
 # -- General configuration ---------------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be
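Registering an instance of the lexer in sphinx.highlighting.lexers under the key 'lkql' makes that name usable in the documentation's code-block and highlight directives. Outside of Sphinx, the class can also be driven directly through the standard Pygments API as a quick sanity check. The snippet below is a minimal sketch and not part of the commit: it assumes LKQLPygmentsLexer from the diff above is in scope, and the sample string is only an illustrative, made-up LKQL fragment.

from pygments import highlight
from pygments.formatters import TerminalFormatter

# Assumes LKQLPygmentsLexer (defined in conf.py above) is importable or
# already in scope.  The sample is a made-up LKQL-style fragment used only
# to exercise the token rules.
sample = "val decls = select ObjectDecl  # hypothetical query"

# highlight() runs the lexer over the sample and renders ANSI-colored output.
print(highlight(sample, LKQLPygmentsLexer(), TerminalFormatter()))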