-
Notifications
You must be signed in to change notification settings - Fork 1.3k
/
Copy pathhelper.py
159 lines (128 loc) · 6.24 KB
/
helper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
import logging
import math
from itertools import chain
from pathlib import Path
def underline(title: str, character: str = "=") -> str:
    """Return *title* followed by an RST underline of matching length.

    Args:
        title: The section title text.
        character: The character to repeat for the underline (default "=").

    Returns:
        A two-line string: the title, a newline, and the underline rule.
    """
    rule = character * len(title)
    return "\n".join((title, rule))
def generate_title(filename: str) -> str:
    """Extract a title from a script's first '###' comment line and underline it.

    Scans *filename* line by line for the first line containing '###',
    takes the text after the three-character marker (assumes the marker is
    at the start of the line, since the slice is a fixed ``line[3:]``),
    and returns it formatted as an RST section title via ``underline``.

    Args:
        filename: Path to the script to scan (str or path-like).

    Returns:
        The extracted title with an '=' underline appended.

    Raises:
        ValueError: If no line containing '###' is found. (Previously this
            was an uninitialized-variable NameError followed by an assert
            whose f-string never interpolated the filename.)
    """
    title = None
    with open(filename) as f:
        # Find the first line that contains '###'; its text becomes the title.
        for line in f:
            if '###' in line:
                title = line[3:].strip()
                break
    if title is None:
        # Raise instead of assert: asserts are stripped under `python -O`.
        raise ValueError(f"No title found in {filename}")
    return underline(title)
def generate_examples():
    """Generate per-script RST pages and toctree indexes for the example docs.

    Scans ``examples/llm-api`` and ``examples/serve`` for ``*.py`` / ``*.sh``
    scripts, writes one ``.rst`` page per script into
    ``docs/source/examples``, and fills the ``%EXAMPLE_DOCS%`` /
    ``%EXAMPLE_NAME%`` placeholders in the toctree templates.
    Purely side-effecting: writes files under docs/source and returns None.
    """
    # Repo root: three parents up from this file — assumes this helper lives
    # at <root>/docs/source/helper.py (TODO confirm).
    root_dir = Path(__file__).parent.parent.parent.resolve()
    ignore_list = {'__init__.py', 'quickstart_example.py'}
    doc_dir = root_dir / "docs/source/examples"
    # Source paths for LLMAPI examples
    llmapi_script_dir = root_dir / "examples/llm-api"
    llmapi_script_paths = sorted(
        llmapi_script_dir.glob("*.py"),
        # The autoPP example should be at the end since it is a preview example
        key=lambda x: math.inf if 'llm_auto_parallel' in x.stem else 0)
    llmapi_script_paths += sorted(llmapi_script_dir.glob("*.sh"))
    llmapi_script_paths = [
        i for i in llmapi_script_paths if i.name not in ignore_list
    ]
    # Destination paths for LLMAPI examples (one .rst per script stem)
    llmapi_doc_paths = [
        doc_dir / f"{path.stem}.rst" for path in llmapi_script_paths
    ]
    llmapi_script_base_url = "https://github.com/NVIDIA/TensorRT-LLM/tree/main/examples/llm-api"
    # Path for trtllm-serve examples
    serve_script_dir = root_dir / "examples/serve"
    serve_script_paths = sorted(
        chain(serve_script_dir.glob("*.py"), serve_script_dir.glob("*.sh")))
    serve_script_paths = [
        i for i in serve_script_paths if i.name not in ignore_list
    ]
    serve_doc_paths = [
        doc_dir / f"{path.stem}.rst" for path in serve_script_paths
    ]
    serve_script_base_url = "https://github.com/NVIDIA/TensorRT-LLM/tree/main/examples/serve"

    # Generate the example docs for each example script
    def write_script(base_url, script_paths, doc_paths, extra_content=""):
        # Writes one .rst page per script; scripts and destination paths
        # are paired positionally, so the two lists must be parallel.
        for script_path, doc_path in zip(script_paths, doc_paths):
            if script_path.name in ignore_list:
                logging.warning(f"Ignoring file: {script_path.name}")
                continue
            script_url = f"{base_url}/{script_path.name}"
            # Determine language based on file extension
            language = "python" if script_path.suffix == ".py" else "bash"
            # Make script_path relative to doc_path and call it include_path
            # (note: 'str / Path' works via Path.__rtruediv__)
            include_path = '../../..' / script_path.relative_to(root_dir)
            # For Python files, use generate_title to extract title from comments
            # For shell scripts, use filename as title
            if script_path.suffix == ".py":
                title = generate_title(script_path)
            else:
                # Create a title from the filename (remove extension and replace underscores with spaces)
                title_text = script_path.stem.replace('_', ' ').title()
                title = underline(title_text)
            content = (f"{title}\n\n"
                       f"{extra_content}"
                       f"Source {script_url}.\n\n"
                       f".. literalinclude:: {include_path}\n"
                       f" :language: {language}\n"
                       " :linenos:\n")
            with open(doc_path, "w+") as f:
                f.write(content)

    # Generate the toctree for LLMAPI example scripts
    write_script(llmapi_script_base_url, llmapi_script_paths, llmapi_doc_paths)
    with open(doc_dir / "llm_examples_index.template.rst_") as f:
        examples_index = f.read()
    with open(doc_dir / "llm_api_examples.rst", "w+") as f:
        example_docs = "\n ".join(path.stem for path in llmapi_script_paths)
        f.write(examples_index.replace(r"%EXAMPLE_DOCS%", example_docs)\
                .replace(r"%EXAMPLE_NAME%", "LLM Examples"))
    # Generate the toctree for trtllm-serve example scripts
    trtllm_serve_content = "Refer to the `trtllm-serve documentation <https://nvidia.github.io/TensorRT-LLM/commands/trtllm-serve.html>`_ for starting a server.\n\n"
    write_script(serve_script_base_url, serve_script_paths, serve_doc_paths,
                 trtllm_serve_content)
    # NOTE: deliberately reuses the llm_examples_index template text read
    # above, substituting a different %EXAMPLE_NAME%.
    with open(doc_dir / "trtllm_serve_examples.rst", "w+") as f:
        example_docs = "\n ".join(path.stem for path in serve_script_paths)
        f.write(examples_index.replace(r"%EXAMPLE_DOCS%", example_docs)\
                .replace(r"%EXAMPLE_NAME%", "Online Serving Examples"))
    # Fill %EXAMPLE_DOCS% into the examples index page, rewriting it in place
    # (read fully first, then reopen for writing).
    with open(doc_dir / "index.rst") as f:
        examples_index = f.read()
    with open(doc_dir / "index.rst", "w+") as f:
        example_docs = "\n ".join(path.stem for path in llmapi_script_paths)
        f.write(examples_index.replace(r"%EXAMPLE_DOCS%", example_docs))
def extract_all_and_eval(file_path):
    '''Extract the __all__ variable from a Python file without importing it.

    Executes only the tail of the file, starting at the first line that
    begins with ``__all__``. This is a trick to make the CI happy even when
    the tensorrt_llm lib is not available: the imports and code above the
    ``__all__`` definition are deliberately skipped.

    NOTE: This requires the __all__ variable to be defined at the end of the
    file, and the tail must not reference names defined earlier in the file.

    Args:
        file_path: Path to the Python source file to scan.

    Returns:
        dict: The local namespace produced by executing the tail, including
        the '__all__' entry.

    Raises:
        ValueError: If the file contains no line starting with ``__all__``.
            (Previously this case silently exec'd the WHOLE file, defeating
            the import-avoidance trick.)
    '''
    with open(file_path, 'r') as file:
        content = file.read()
    lines = content.split('\n')
    # Locate the first line that defines __all__; everything before it
    # (real-library imports, heavy code) must not be executed.
    filtered_line_begin = None
    for i, line in enumerate(lines):
        if line.startswith("__all__"):
            filtered_line_begin = i
            break
    if filtered_line_begin is None:
        raise ValueError(f"No __all__ definition found in {file_path}")
    code_to_eval = '\n'.join(lines[filtered_line_begin:])
    local_vars = {}
    # NOTE: exec of trusted in-repo source only; never use on untrusted files.
    exec(code_to_eval, {}, local_vars)
    return local_vars
def generate_llmapi():
    """Write the LLM-API reference page (docs/source/llm-api/reference.rst).

    Reads the public name list from tensorrt_llm/llmapi/__init__.py via
    ``extract_all_and_eval`` and emits one Sphinx ``autoclass`` directive
    per exported name under an "API Reference" section header.
    Purely side-effecting: writes one file and returns None.
    """
    root_dir = Path(__file__).parent.parent.parent.resolve()
    # Destination paths
    doc_dir = root_dir / "docs/source/llm-api"
    doc_dir.mkdir(exist_ok=True)
    doc_path = doc_dir / "reference.rst"

    # Public API names come from the package's __all__ (read without import).
    llmapi_all_file = root_dir / "tensorrt_llm/llmapi/__init__.py"
    exported_names = extract_all_and_eval(llmapi_all_file)['__all__']

    # Assemble the page: section header, then one autoclass block per name.
    pieces = [underline("API Reference", "-") + "\n\n"]
    for name in exported_names:
        name = name.strip()
        pieces.append(f".. autoclass:: tensorrt_llm.llmapi.{name}\n"
                      " :members:\n"
                      " :undoc-members:\n"
                      " :special-members: __init__\n"
                      " :show-inheritance:\n")

    with open(doc_path, "w+") as f:
        f.write("".join(pieces))