Skip to content

Commit 381a1bc

Browse files
committed
style parse
1 parent 9eb73f4 commit 381a1bc

File tree

4 files changed

+174
-23
lines changed

4 files changed

+174
-23
lines changed

src/textual/_style_parse.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def style_parse(style_text: str, variables: dict[str, str] | None) -> Style:
9393
if name in ("key_value_quote", "key_value_double_quote"):
9494
value = value[1:-1]
9595
if key.startswith("@"):
96-
meta[key[1:]] = value
96+
meta[key] = value
9797
else:
9898
data[key] = value
9999
elif name == "percent" or (name == "scalar" and value.endswith("%")):

src/textual/app.py

+6
Original file line numberDiff line numberDiff line change
@@ -4198,6 +4198,12 @@ def action_show_help_panel(self) -> None:
41984198
except NoMatches:
41994199
self.mount(HelpPanel())
42004200

4201+
def action_notify(
    self, message: str, title: str = "", severity: str = "information"
) -> None:
    """Action to display a toast notification.

    Args:
        message: Body text of the notification.
        title: Optional heading for the notification (empty for no title).
        severity: Severity name forwarded verbatim to `notify`
            (defaults to "information").
    """
    self.notify(message, title=title, severity=severity)
4206+
42014207
def _on_terminal_supports_synchronized_output(
42024208
self, message: messages.TerminalSupportsSynchronizedOutput
42034209
) -> None:

src/textual/css/tokenize.py

+16-11
Original file line numberDiff line numberDiff line change
@@ -181,31 +181,36 @@ class TokenizerState:
181181
STATE_PUSH: ClassVar[dict[str, Expect]] = {}
182182
STATE_POP: ClassVar[dict[str, str]] = {}
183183

184+
def __init__(self) -> None:
    """Initialize the tokenizer state.

    The mutable expect-state cursor starts at the class-level default
    `EXPECT` and is advanced/reset as the token stream is consumed.
    """
    super().__init__()
    self._expect: Expect = self.EXPECT
187+
188+
def expect(self, expect: Expect) -> None:
    """Force the tokenizer into a given expect state.

    Args:
        expect: The `Expect` instance to use for the next token read.
    """
    self._expect = expect
190+
184191
def __call__(self, code: str, read_from: CSSLocation) -> Iterable[Token]:
185192
tokenizer = Tokenizer(code, read_from=read_from)
186-
expect = self.EXPECT
187193
get_token = tokenizer.get_token
188194
get_state = self.STATE_MAP.get
189195
state_stack: list[Expect] = []
190196

191-
skip_get_token = False
192197
while True:
193-
if not skip_get_token:
194-
token = get_token(expect)
195-
skip_get_token = False
198+
expect = self._expect
199+
token = get_token(expect)
196200
name = token.name
197201
if name in self.STATE_MAP:
198-
expect = get_state(token.name, expect)
202+
self._expect = get_state(token.name, expect)
199203
elif name in self.STATE_PUSH:
200-
expect = self.STATE_PUSH[name]
204+
self._expect = self.STATE_PUSH[name]
201205
state_stack.append(expect)
202206
elif name in self.STATE_POP:
203-
expect_pop = self.STATE_POP[name]
204207
if state_stack:
205-
expect = state_stack.pop()
208+
self._expect = state_stack.pop()
206209
else:
207-
expect = self.EXPECT
208-
skip_get_token = True
210+
self._expect = self.EXPECT
211+
token = token._replace(name="end_tag")
212+
yield token
213+
continue
209214

210215
yield token
211216
if name == "eof":

src/textual/markup.py

+151-11
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
from __future__ import annotations
22

3+
from textual.css.parse import substitute_references
4+
35
__all__ = ["MarkupError", "escape", "to_content"]
46

57
import re
@@ -17,13 +19,16 @@
1719
Union,
1820
)
1921

22+
from textual._context import active_app
23+
from textual.color import Color
2024
from textual.css.tokenize import (
2125
COLOR,
2226
PERCENT,
2327
TOKEN,
2428
VARIABLE_REF,
2529
Expect,
2630
TokenizerState,
31+
tokenize_values,
2732
)
2833
from textual.style import Style
2934

@@ -56,8 +61,7 @@ class MarkupError(Exception):
5661
expect_markup_expression = Expect(
5762
"markup",
5863
end_tag=r"(?<!\\)\]",
59-
word=r"\w+",
60-
period=r"\.",
64+
word=r"[\w\.]+",
6165
round_start=r"\(",
6266
round_end=r"\)",
6367
square_start=r"\[",
@@ -74,7 +78,7 @@ class MarkupError(Exception):
7478
class MarkupTokenizer(TokenizerState):
7579
"""Tokenizes Textual markup."""
7680

77-
EXPECT = expect_markup.expect_eof(True)
81+
EXPECT = expect_markup.expect_eof()
7882
STATE_MAP = {
7983
"open_tag": expect_markup_tag,
8084
"open_closing_tag": expect_markup_tag,
@@ -93,6 +97,142 @@ class MarkupTokenizer(TokenizerState):
9397
}
9498

9599

100+
# Token patterns for a top-level inline style string.
# NOTE: keyword order is preserved — pattern order may affect matching.
expect_style = Expect(
    "style token",
    end_tag=r"(?<!\\)\]",
    key=r"[@a-zA-Z_-][a-zA-Z0-9_-]*=",
    percent=PERCENT,
    color=COLOR,
    token=TOKEN,
    variable_ref=VARIABLE_REF,
    whitespace=r"\s+",
    double_string=r"\".*?\"",
    single_string=r"'.*?'",
)


class StyleTokenizer(TokenizerState):
    """Tokenizer for inline style definitions (e.g. ``"bold red on black"``)."""

    EXPECT = expect_style.expect_eof()
    # After a "key=" token, switch to expression tokenizing for the value.
    STATE_MAP = {"key": expect_markup_expression.expect_eof()}
    # Opening brackets push expression state so values may contain
    # nested bracketed expressions.
    STATE_PUSH = {
        bracket: expect_markup_expression
        for bracket in ("round_start", "square_start", "curly_start")
    }
124+
125+
126+
# Full names of the boolean text styles understood by the style parser.
STYLES = {"bold", "dim", "italic", "underline", "reverse", "strike"}

# Single-letter shorthand for each style; every name in STYLES begins
# with a distinct letter, so the first character is an unambiguous key.
STYLE_ABBREVIATIONS = {name[0]: name for name in STYLES}
135+
136+
137+
def parse_style(style: str, variables: dict[str, str] | None = None) -> Style:
    """Parse an inline style definition (e.g. ``"bold red on black"``) into a Style.

    Args:
        style: The style text to parse.
        variables: Mapping of variable names to values used to substitute
            ``$name`` references, or ``None`` to use the active app's
            stylesheet variables (no substitution when there is no app).

    Returns:
        A `Style` combining parsed colors, text styles, and ``@key=value`` meta.
    """
    styles: dict[str, bool | None] = {}
    color: Color | None = None
    background: Color | None = None
    is_background: bool = False
    # Set to False by a "not" token; applies to the next style word only.
    style_state: bool = True

    tokenizer = StyleTokenizer()
    meta: dict[str, str] = {}

    if variables is None:
        try:
            app = active_app.get()
        except LookupError:
            # No running app: variable references cannot be resolved.
            reference_tokens = {}
        else:
            reference_tokens = app.stylesheet._variable_tokens
    else:
        reference_tokens = tokenize_values(variables)

    iter_tokens = iter(
        substitute_references(
            tokenizer(style, ("inline style", "")),
            reference_tokens,
        )
    )

    for token in iter_tokens:
        token_name = token.name
        token_value = token.value
        if token_name == "key":
            # "key=" introduces a meta entry; the value is either a quoted
            # string or a run of tokens up to unbracketed whitespace.
            key = token_value.rstrip("=")
            parenthesis: list[str] = []
            value_text: list[str] = []
            first_token = next(iter_tokens)
            if first_token.name in {"double_string", "single_string"}:
                # Strip the surrounding quotes.
                meta[key] = first_token.value[1:-1]
            else:
                # Include the first value token (previously dropped, which
                # truncated unquoted values and broke bracket balancing).
                value_text.append(first_token.value)
                if first_token.name in {"round_start", "square_start", "curly_start"}:
                    parenthesis.append(first_token.value)
                for token in iter_tokens:
                    if token.name == "whitespace" and not parenthesis:
                        break
                    value_text.append(token.value)
                    if token.name in {"round_start", "square_start", "curly_start"}:
                        parenthesis.append(token.value)
                    elif token.name in {"round_end", "square_end", "curly_end"}:
                        parenthesis.pop()
                        if not parenthesis:
                            break
                meta[key] = "".join(value_text)
            # Return the tokenizer to the top-level style state after the
            # value (previously skipped on the quoted-string path).
            tokenizer.expect(StyleTokenizer.EXPECT)

        elif token_name == "color":
            if is_background:
                background = Color.parse(token_value)
            else:
                color = Color.parse(token_value)

        elif token_name == "token":
            if token_value == "on":
                # Subsequent colors apply to the background.
                is_background = True
            elif token_value == "auto":
                if is_background:
                    background = Color.automatic()
                else:
                    color = Color.automatic()
            elif token_value == "not":
                style_state = False
            elif token_value in STYLES:
                styles[token_value] = style_state
                style_state = True
            elif token_value in STYLE_ABBREVIATIONS:
                styles[STYLE_ABBREVIATIONS[token_value]] = style_state
                style_state = True
            else:
                # Unrecognized word: treat it as a named color.
                if is_background:
                    background = Color.parse(token_value)
                else:
                    color = Color.parse(token_value)

        elif token_name == "percent":
            # A percentage scales the alpha of the most recent color.
            # float() accepts fractional percentages (e.g. "12.5%"), which
            # int() would reject.
            percent = float(token_value.rstrip("%")) / 100.0
            if is_background:
                if background is not None:
                    background = background.multiply_alpha(percent)
            elif color is not None:
                color = color.multiply_alpha(percent)

    parsed_style = Style(background, color, link=meta.get("link", None), **styles)
    if meta:
        parsed_style += Style.from_meta(meta)
    return parsed_style
234+
235+
96236
RE_TAGS = re.compile(
97237
r"""((\\*)\[([\$a-z#/@][^[]*?)])""",
98238
re.VERBOSE,
@@ -329,16 +469,16 @@ def to_content(markup: str, style: str | Style = "") -> Content:
329469
position = 0
330470
tag_text: list[str]
331471
for token in iter_tokens:
332-
print(repr(token))
472+
333473
token_name = token.name
334474
if token_name == "text":
335475
text.append(token.value)
336476
position += len(token.value)
337477
elif token_name == "open_tag":
338478
tag_text = []
339-
print("open")
479+
340480
for token in iter_tokens:
341-
print(" ", repr(token))
481+
342482
if token.name == "end_tag":
343483
break
344484
tag_text.append(token.value)
@@ -347,9 +487,9 @@ def to_content(markup: str, style: str | Style = "") -> Content:
347487

348488
elif token_name == "open_closing_tag":
349489
tag_text = []
350-
print("closing")
490+
351491
for token in iter_tokens:
352-
print(" ", repr(token))
492+
353493
if token.name == "end_tag":
354494
break
355495
tag_text.append(token.value)
@@ -363,16 +503,16 @@ def to_content(markup: str, style: str | Style = "") -> Content:
363503

364504
else:
365505
open_position, tag = style_stack.pop()
366-
spans.append(Span(open_position, position, tag))
506+
spans.append(Span(open_position, position, Style.parse(tag)))
367507

368508
content_text = "".join(text)
369509
text_length = len(content_text)
370510
while style_stack:
371511
position, tag = style_stack.pop()
372-
spans.append(Span(position, text_length, tag))
512+
spans.append(Span(position, text_length, Style.parse(tag)))
373513

374514
content = Content(content_text, spans)
375-
print(repr(content))
515+
376516
return content
377517

378518

0 commit comments

Comments
 (0)