@@ -25,7 +25,7 @@ def _filter(elem):
25
25
# Create a list containing all links
26
26
links = [link .get ("href" ) for link in filter (_filter , soup .find_all ("a" , href = True ))]
27
27
if links :
28
- print (links )
28
+ print ("Checking" , links )
29
29
30
30
# Initialize list for broken links.
31
31
broken_links = []
@@ -50,17 +50,19 @@ def _validate_url(url):
50
50
def check_project(project):
    """Scan one project's generated HTML docs for broken links.

    Walks the project's documentation tree under the module-level
    ``doc_folder`` prefix and runs ``get_broken_links`` on every
    ``.html`` file found.

    Args:
        project: Project directory name, appended to ``doc_folder``
            to form the root path to scan.

    Returns:
        A ``(html_file_found, broken_links)`` tuple:
        ``html_file_found`` is True if at least one ``.html`` file was
        encountered (False usually means the docs were never generated);
        ``broken_links`` maps each HTML file path to its non-empty list
        of broken links.
    """
    project_path = doc_folder + project
    broken_links = {}
    html_file_found = False

    for dir_name, _subdirs, file_names in os.walk(project_path):
        for fname in file_names:
            if fname.endswith(".html"):
                html_file_found = True
                # os.path.join instead of manual '/' concatenation,
                # so the path is correct on every platform.
                fpath = os.path.join(dir_name, fname)

                file_broken_links = get_broken_links(fpath)
                if file_broken_links:
                    broken_links[fpath] = file_broken_links

    return html_file_found, broken_links
64
66
65
67
# main
66
68
@@ -76,7 +78,10 @@ def check_project(project):
76
78
]
77
79
78
80
for project in projects :
79
- broken_links .update (check_project (project ))
81
+ html_file_found , broken = check_project (project )
82
+ if not html_file_found :
83
+ print ("No .html file found in project " + project + ". Did you generate the docs?" )
84
+ broken_links .update (broken )
80
85
81
86
if len (broken_links ) > 0 :
82
87
print ("Dead links found!" )
0 commit comments