11from __future__ import annotations
22
3+ from textual .css .parse import substitute_references
4+
35__all__ = ["MarkupError" , "escape" , "to_content" ]
46
57import re
1719 Union ,
1820)
1921
22+ from textual ._context import active_app
23+ from textual .color import Color
2024from textual .css .tokenize import (
2125 COLOR ,
2226 PERCENT ,
2327 TOKEN ,
2428 VARIABLE_REF ,
2529 Expect ,
2630 TokenizerState ,
31+ tokenize_values ,
2732)
2833from textual .style import Style
2934
@@ -56,8 +61,7 @@ class MarkupError(Exception):
5661expect_markup_expression = Expect (
5762 "markup" ,
5863 end_tag = r"(?<!\\)\]" ,
59- word = r"\w+" ,
60- period = r"\." ,
64+ word = r"[\w\.]+" ,
6165 round_start = r"\(" ,
6266 round_end = r"\)" ,
6367 square_start = r"\[" ,
@@ -74,7 +78,7 @@ class MarkupError(Exception):
7478class MarkupTokenizer (TokenizerState ):
7579 """Tokenizes Textual markup."""
7680
77- EXPECT = expect_markup .expect_eof (True )
81+ EXPECT = expect_markup .expect_eof ()
7882 STATE_MAP = {
7983 "open_tag" : expect_markup_tag ,
8084 "open_closing_tag" : expect_markup_tag ,
@@ -93,6 +97,142 @@ class MarkupTokenizer(TokenizerState):
9397 }
9498
9599
# Grammar for tokenizing an inline style definition — the contents of a
# markup tag such as `bold red on $primary @click='app.bell'`.
# NOTE(review): kwarg order is preserved as written; patterns are presumably
# tried in declaration order by Expect — confirm before reordering.
expect_style = Expect(
    "style token",
    end_tag=r"(?<!\\)\]",
    key=r"[@a-zA-Z_-][a-zA-Z0-9_-]*=",
    percent=PERCENT,
    color=COLOR,
    token=TOKEN,
    variable_ref=VARIABLE_REF,
    whitespace=r"\s+",
    double_string=r"\".*?\"",
    single_string=r"'.*?'",
)
112+
113+
class StyleTokenizer(TokenizerState):
    """Tokenizes an inline style definition (e.g. ``"bold red on black"``)."""

    # Top-level grammar; EOF is permitted (a style may end at any token).
    EXPECT = expect_style.expect_eof()
    # After a `key=` token, switch to the markup-expression grammar to read
    # the value.
    STATE_MAP = {"key": expect_markup_expression.expect_eof()}
    # Opening brackets push the expression grammar so nested expressions
    # tokenize correctly.
    STATE_PUSH = {
        "round_start": expect_markup_expression,
        "square_start": expect_markup_expression,
        "curly_start": expect_markup_expression,
    }
124+
125+
# Boolean style flags recognized in a style definition.
STYLES = {"bold", "dim", "italic", "underline", "reverse", "strike"}

# Single-letter shorthand for each flag, e.g. "b" means "bold".
STYLE_ABBREVIATIONS = {
    "b": "bold",
    "d": "dim",
    "i": "italic",
    "u": "underline",
    "r": "reverse",
    "s": "strike",
}
135+
136+
def parse_style(style: str, variables: dict[str, str] | None = None) -> Style:
    """Parse an inline style definition into a `Style`.

    Example: ``"bold red on $primary @click='app.bell'"``.

    Args:
        style: The style text to parse.
        variables: Optional mapping used to substitute ``$variable``
            references. If ``None``, variables come from the active app's
            stylesheet (no substitution happens outside an app context).

    Returns:
        The parsed `Style`.
    """
    from itertools import chain

    styles: dict[str, bool | None] = {}
    color: Color | None = None
    background: Color | None = None
    # After an "on" token, colors apply to the background.
    is_background: bool = False
    # Set False by a "not" token to negate the next style flag.
    style_state: bool = True

    tokenizer = StyleTokenizer()
    meta: dict[str, str] = {}

    # Decide where $variable values come from: an explicit mapping, the
    # running app's stylesheet, or nowhere (no app context).
    if variables is None:
        try:
            app = active_app.get()
        except LookupError:
            reference_tokens = {}
        else:
            reference_tokens = app.stylesheet._variable_tokens
    else:
        reference_tokens = tokenize_values(variables)

    iter_tokens = iter(
        substitute_references(
            tokenizer(style, ("inline style", "")),
            reference_tokens,
        )
    )

    for token in iter_tokens:
        token_name = token.name
        token_value = token.value
        if token_name == "key":
            # A key=value pair destined for the style's meta dict.
            key = token_value.rstrip("=")
            parenthesis: list[str] = []
            value_text: list[str] = []
            first_token = next(iter_tokens)
            if first_token.name in {"double_string", "single_string"}:
                # Quoted value: strip the surrounding quotes.
                meta[key] = first_token.value[1:-1]
            else:
                # Unquoted value: consume tokens up to the next top-level
                # whitespace, tracking bracket nesting. `first_token` is part
                # of the value too (previously it was silently dropped).
                for token in chain([first_token], iter_tokens):
                    if token.name == "whitespace" and not parenthesis:
                        break
                    value_text.append(token.value)
                    if token.name in {"round_start", "square_start", "curly_start"}:
                        parenthesis.append(token.value)
                    elif token.name in {"round_end", "square_end", "curly_end"}:
                        parenthesis.pop()
                        if not parenthesis:
                            break
                # Return the tokenizer to the top-level style grammar.
                tokenizer.expect(StyleTokenizer.EXPECT)

                meta[key] = "".join(value_text)

        elif token_name == "color":
            if is_background:
                background = Color.parse(token_value)
            else:
                color = Color.parse(token_value)

        elif token_name == "token":
            if token_value == "on":
                is_background = True
            elif token_value == "auto":
                if is_background:
                    background = Color.automatic()
                else:
                    color = Color.automatic()
            elif token_value == "not":
                style_state = False
            elif token_value in STYLES:
                styles[token_value] = style_state
                style_state = True
            elif token_value in STYLE_ABBREVIATIONS:
                styles[STYLE_ABBREVIATIONS[token_value]] = style_state
                style_state = True
            else:
                # Any other word is treated as a named color.
                if is_background:
                    background = Color.parse(token_value)
                else:
                    color = Color.parse(token_value)

        elif token_name == "percent":
            # A percentage scales the alpha of the preceding color, e.g.
            # "red 50%". float() also accepts fractional percentages such as
            # "12.5%", which int() would reject with ValueError.
            percent = float(token_value.rstrip("%")) / 100.0
            if is_background:
                if background is not None:
                    background = background.multiply_alpha(percent)
            elif color is not None:
                color = color.multiply_alpha(percent)

    parsed_style = Style(background, color, link=meta.get("link", None), **styles)
    if meta:
        parsed_style += Style.from_meta(meta)
    return parsed_style
234+
235+
96236RE_TAGS = re .compile (
97237 r"""((\\*)\[([\$a-z#/@][^[]*?)])""" ,
98238 re .VERBOSE ,
@@ -329,16 +469,16 @@ def to_content(markup: str, style: str | Style = "") -> Content:
329469 position = 0
330470 tag_text : list [str ]
331471 for token in iter_tokens :
332- print ( repr ( token ))
472+
333473 token_name = token .name
334474 if token_name == "text" :
335475 text .append (token .value )
336476 position += len (token .value )
337477 elif token_name == "open_tag" :
338478 tag_text = []
339- print ( "open" )
479+
340480 for token in iter_tokens :
341- print ( " " , repr ( token ))
481+
342482 if token .name == "end_tag" :
343483 break
344484 tag_text .append (token .value )
@@ -347,9 +487,9 @@ def to_content(markup: str, style: str | Style = "") -> Content:
347487
348488 elif token_name == "open_closing_tag" :
349489 tag_text = []
350- print ( "closing" )
490+
351491 for token in iter_tokens :
352- print ( " " , repr ( token ))
492+
353493 if token .name == "end_tag" :
354494 break
355495 tag_text .append (token .value )
@@ -363,16 +503,16 @@ def to_content(markup: str, style: str | Style = "") -> Content:
363503
364504 else :
365505 open_position , tag = style_stack .pop ()
366- spans .append (Span (open_position , position , tag ))
506+ spans .append (Span (open_position , position , Style . parse ( tag ) ))
367507
368508 content_text = "" .join (text )
369509 text_length = len (content_text )
370510 while style_stack :
371511 position , tag = style_stack .pop ()
372- spans .append (Span (position , text_length , tag ))
512+ spans .append (Span (position , text_length , Style . parse ( tag ) ))
373513
374514 content = Content (content_text , spans )
375- print ( repr ( content ))
515+
376516 return content
377517
378518
0 commit comments