
Commit 2baf95f

Add all needed pyright:ignore statements - to be removed in the future, one by one
1 parent 3f50609 commit 2baf95f

25 files changed: +240 -171 lines changed
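
For context on the approach: a # pyright: ignore comment suppresses pyright's diagnostics on that single line, so the type checker passes today while every suppression stays visible for later cleanup. A minimal, hypothetical illustration of the pattern (not code from zimscraperlib):

    # Hypothetical example: pyright reports that re.search() may return None,
    # so calling .group() on it directly is an error. The trailing comment
    # silences that one diagnostic.
    import re

    def first_digit(text: str) -> str:
        match = re.search(r"\d", text)
        return match.group(0)  # pyright: ignore

    print(first_digit("a1b2"))  # 1

    # The eventual cleanup ("removed in the future, one by one") is narrowing:
    #     if match is None:
    #         raise ValueError("no digit in text")
    #     return match.group(0)

Each diff below applies that same pattern to an existing call site without changing behaviour.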

Diff for: src/zimscraperlib/download.py

+14-10
@@ -52,14 +52,16 @@ def download(
 
         Returns download result of future (wait=False)"""
 
-        future = self.executor.submit(self._run_youtube_dl, url, options)
+        future = self.executor.submit(
+            self._run_youtube_dl, url, options  # pyright: ignore
+        )
         if not wait:
             return future
         if not future.exception():
             # return the result
-            return future.result()
+            return future.result()  # pyright: ignore
         # raise the exception
-        raise future.exception()
+        raise future.exception()  # pyright: ignore
 
 
 class YoutubeConfig(dict):
@@ -137,8 +139,10 @@ def save_large_file(url: str, fpath: pathlib.Path) -> None:
     )
 
 
-def _get_retry_adapter(max_retries: Optional[int] = 5) -> requests.adapters.BaseAdapter:
-    retries = requests.packages.urllib3.util.retry.Retry(
+def _get_retry_adapter(
+    max_retries: Optional[int] = 5,
+) -> requests.adapters.BaseAdapter:  # pyright: ignore
+    retries = requests.packages.urllib3.util.retry.Retry(  # pyright: ignore
         total=max_retries,  # total number of retries
         connect=max_retries,  # connection errors
         read=max_retries,  # read errors
@@ -155,7 +159,7 @@ def _get_retry_adapter(max_retries: Optional[int] = 5) -> requests.adapters.Base
         ],  # force retry on the following codes
     )
 
-    return requests.adapters.HTTPAdapter(max_retries=retries)
+    return requests.adapters.HTTPAdapter(max_retries=retries)  # pyright: ignore
 
 
 def get_session(max_retries: Optional[int] = 5) -> requests.Session:
@@ -175,7 +179,7 @@ def stream_file(
     max_retries: Optional[int] = 5,
     headers: Optional[Dict[str, str]] = None,
     session: Optional[requests.Session] = None,
-) -> tuple[int, requests.structures.CaseInsensitiveDict]:
+) -> tuple[int, requests.structures.CaseInsensitiveDict]:  # pyright: ignore
     """Stream data from a URL to either a BytesIO object or a file
     Arguments -
         fpath - Path of the file where data is sent
@@ -211,7 +215,7 @@ def stream_file(
 
     for data in resp.iter_content(block_size):
         total_downloaded += len(data)
-        fp.write(data)
+        fp.write(data)  # pyright: ignore
 
         # stop downloading/reading if we're just testing first block
         if only_first_block:
@@ -220,7 +224,7 @@ def stream_file(
     logger.debug(f"Downloaded {total_downloaded} bytes from {url}")
 
     if fpath:
-        fp.close()
+        fp.close()  # pyright: ignore
     else:
-        fp.seek(0)
+        fp.seek(0)  # pyright: ignore
     return total_downloaded, resp.headers
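
A note on the Future-related ignores (an inference, not something the diff states): concurrent.futures typing allows Future.exception() to return None, so raising or returning it unguarded is what pyright objects to. A hedged sketch of how such an ignore could later be dropped by narrowing; wait_for is an illustrative name, not library API:

    from concurrent.futures import Future, ThreadPoolExecutor

    def wait_for(future: "Future[int]") -> int:
        exc = future.exception()  # typed as Optional[BaseException]
        if exc is not None:
            raise exc  # narrowed, so no suppression needed
        return future.result()

    with ThreadPoolExecutor(max_workers=1) as pool:
        print(wait_for(pool.submit(lambda: 21 * 2)))  # 42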

Diff for: src/zimscraperlib/html.py

+4-4
@@ -18,7 +18,7 @@ def find_title_in(content: Union[str, BinaryIO, TextIO], mime_type: str) -> str:
     if mime_type != ARTICLE_MIME:
         return ""
     try:
-        return BeautifulSoup(content, "lxml").find("title").text
+        return BeautifulSoup(content, "lxml").find("title").text  # pyright: ignore
     except Exception:
         return ""
 
@@ -44,15 +44,15 @@ def find_language_in(content: Union[str, BinaryIO, TextIO], mime_type: str) -> s
         for key in keylist:
             node = soup.find(nodename)
             if node:
-                if not node.has_attr(key):
+                if not node.has_attr(key):  # pyright: ignore
                     continue
                 if (
                     nodename == "meta"
-                    and not node.attrs.get("http-equiv", "").lower()
+                    and not node.attrs.get("http-equiv", "").lower()  # pyright: ignore
                     == "content-language"
                 ):
                     continue
-                return node.attrs[key]
+                return node.attrs[key]  # pyright: ignore
     return ""
 
 
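
A likely reason for these ignores (an inference, not stated in the commit): BeautifulSoup's find() is typed as possibly returning None (or a NavigableString), so attribute access such as .text, .has_attr() and .attrs gets flagged. A hedged sketch of the narrowed form, assuming beautifulsoup4 and lxml are installed; title_of is an illustrative helper, not library API:

    from bs4 import BeautifulSoup
    from bs4.element import Tag

    def title_of(html: str) -> str:
        node = BeautifulSoup(html, "lxml").find("title")
        if isinstance(node, Tag):  # rules out None (and NavigableString)
            return node.text
        return ""

    print(title_of("<html><head><title>Hello</title></head></html>"))  # Hello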

Diff for: src/zimscraperlib/i18n.py

+8-4
@@ -102,15 +102,19 @@ def find_language_names(
     lang_data = get_language_details(query, failsafe=True) or {}
     try:
         query_locale = babel.Locale.parse(query)
-        return query_locale.get_display_name(), query_locale.get_display_name("en")
+        return query_locale.get_display_name(), query_locale.get_display_name(
+            "en"
+        )  # pyright: ignore
     except (babel.UnknownLocaleError, TypeError, ValueError, AttributeError):
         pass
 
     # ISO code lookup order matters (most qualified first)!
     for iso_level in [f"iso-639-{lang_}" for lang_ in reversed(ISO_LEVELS)]:
         try:
             query_locale = babel.Locale.parse(lang_data.get(iso_level))
-            return query_locale.get_display_name(), query_locale.get_display_name("en")
+            return query_locale.get_display_name(), query_locale.get_display_name(
+                "en"
+            )  # pyright: ignore
         except (babel.UnknownLocaleError, TypeError, ValueError, AttributeError):
             pass
     default = lang_data.get("english", query)
@@ -166,10 +170,10 @@ def get_language_details(
         lang_data, macro_data = get_iso_lang_data(adjusted_query)
     except NotFound as exc:
         if failsafe:
-            return None
+            return None  # pyright: ignore
         raise exc
 
-    iso_data = update_with_macro(lang_data, macro_data)
+    iso_data = update_with_macro(lang_data, macro_data)  # pyright: ignore
     native_name, english_name = find_language_names(native_query, iso_data)
     iso_data.update(
         {
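
Again an inference rather than anything the diff states: babel's Locale.get_display_name() is annotated as possibly returning None, so handing the pair straight back from a function expected to produce two strings is what pyright flags. A hedged sketch with an explicit guard; display_names is illustrative, not the library's function:

    import babel

    def display_names(code: str) -> tuple[str, str]:
        locale = babel.Locale.parse(code)
        native = locale.get_display_name()
        english = locale.get_display_name("en")
        if native is None or english is None:
            raise ValueError(f"no display name for {code}")
        return native, english

    print(display_names("fr"))  # e.g. ('français', 'French')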

Diff for: src/zimscraperlib/image/convertion.py

+6-4
@@ -25,10 +25,12 @@ def convert_image(
     to RGB. ex: RGB, ARGB, CMYK (and other PIL colorspaces)"""
 
     colorspace = params.get("colorspace")  # requested colorspace
-    fmt = params.pop("fmt").upper() if "fmt" in params else None  # requested format
+    fmt = (
+        params.pop("fmt").upper() if "fmt" in params else None  # pyright: ignore
+    )  # requested format
     if not fmt:
         fmt = format_for(dst)
-    with PIL.Image.open(src) as image:
+    with PIL.Image.open(src) as image:  # pyright: ignore
         if image.mode == "RGBA" and fmt in ALPHA_NOT_SUPPORTED or colorspace:
             image = image.convert(colorspace or "RGB")  # noqa: PLW2901
         save_image(image, dst, fmt, **params)
@@ -39,13 +41,13 @@ def create_favicon(src: pathlib.Path, dst: pathlib.Path) -> None:
     if dst.suffix != ".ico":
         raise ValueError("favicon extension must be ICO")
 
-    img = PIL.Image.open(src)
+    img = PIL.Image.open(src)  # pyright: ignore
     w, h = img.size
     # resize image to square first
     if w != h:
         size = min([w, h])
         resized = dst.parent.joinpath(f"{src.stem}.tmp.{src.suffix}")
         resize_image(src, size, size, resized, "contain")
-        img = PIL.Image.open(resized)
+        img = PIL.Image.open(resized)  # pyright: ignore
     # now convert to ICO
     save_image(img, dst, "ICO")

Diff for: src/zimscraperlib/image/optimization.py

+23-21
@@ -76,25 +76,25 @@ def optimize_png(
     if remove_transparency is True (tuple containing RGB values)
     values: (255, 255, 255) | (221, 121, 108) | (XX, YY, ZZ)"""
 
-    ensure_matches(src, "PNG")
+    ensure_matches(src, "PNG")  # pyright: ignore
 
     img = Image.open(src)
 
     if remove_transparency:
-        img = remove_alpha(img, background_color)
+        img = remove_alpha(img, background_color)  # pyright: ignore
 
     if reduce_colors:
-        img, _, _ = do_reduce_colors(img, max_colors)
+        img, _, _ = do_reduce_colors(img, max_colors)  # pyright: ignore
 
     if not fast_mode and img.mode == "P":
         img, _ = rebuild_palette(img)
 
     if dst is None:
-        dst = io.BytesIO()
-    img.save(dst, optimize=True, format="PNG")
+        dst = io.BytesIO()  # pyright: ignore
+    img.save(dst, optimize=True, format="PNG")  # pyright: ignore
     if isinstance(dst, io.BytesIO):
         dst.seek(0)
-    return dst
+    return dst  # pyright: ignore
 
 
 def optimize_jpeg(
@@ -114,7 +114,7 @@ def optimize_jpeg(
     get dynamic quality value to ensure better compression
     values: True | False"""
 
-    ensure_matches(src, "JPEG")
+    ensure_matches(src, "JPEG")  # pyright: ignore
 
     img = Image.open(src)
     orig_size = (
@@ -138,10 +138,10 @@ def optimize_jpeg(
         quality_setting, _ = jpeg_dynamic_quality(img)
 
     if dst is None:
-        dst = io.BytesIO()
+        dst = io.BytesIO()  # pyright: ignore
 
     img.save(
-        dst,
+        dst,  # pyright: ignore
         quality=quality_setting,
         optimize=True,
         progressive=use_progressive_jpg,
@@ -157,12 +157,14 @@ def optimize_jpeg(
            str(src.resolve()) if isinstance(src, pathlib.Path) else src.getvalue()
        ),
        image=(
-            str(dst.resolve()) if isinstance(dst, pathlib.Path) else dst.getvalue()
+            str(dst.resolve())
+            if isinstance(dst, pathlib.Path)
+            else dst.getvalue()  # pyright: ignore
        ),
        new_file=dst,
     )
 
-    return dst
+    return dst  # pyright: ignore
 
 
 def optimize_webp(
@@ -186,7 +188,7 @@ def optimize_webp(
     refer to the link for more details
     https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html#webp"""
 
-    ensure_matches(src, "WEBP")
+    ensure_matches(src, "WEBP")  # pyright: ignore
     params = {
         "lossless": lossless,
         "quality": quality,
@@ -195,17 +197,17 @@ def optimize_webp(
 
     webp_image = Image.open(src)
     if dst is None:
-        dst = io.BytesIO()
-        webp_image.save(dst, format="WEBP", **params)
-        dst.seek(0)
+        dst = io.BytesIO()  # pyright: ignore
+        webp_image.save(dst, format="WEBP", **params)  # pyright: ignore
+        dst.seek(0)  # pyright: ignore
     else:
         try:
-            save_image(webp_image, dst, fmt="WEBP", **params)
+            save_image(webp_image, dst, fmt="WEBP", **params)  # pyright: ignore
         except Exception as exc:
-            if src.resolve() != dst.resolve() and dst.exists():
+            if src.resolve() != dst.resolve() and dst.exists():  # pyright: ignore
                 dst.unlink()  # pragma: nocover
             raise exc
-    return dst
+    return dst  # pyright: ignore
 
 
 def optimize_gif(
@@ -267,7 +269,7 @@ def optimize_image(
     delete_src: Optional[bool] = False,  # noqa: FBT002
     convert: Optional[Union[bool, str]] = False,  # noqa: FBT002
     **options,
-) -> bool:
+) -> bool:  # pyright: ignore
     """Optimize image, automatically selecting correct optimizer
 
     delete_src: whether to remove src file upon success (boolean)
@@ -281,12 +283,12 @@ def optimize_image(
     # if requested, convert src to requested format into dst path
     if convert and src_format != dst_format:
         src_format = dst_format = convert if isinstance(convert, str) else dst_format
-        convert_image(src, dst, fmt=src_format)
+        convert_image(src, dst, fmt=src_format)  # pyright: ignore
         src_img = pathlib.Path(dst)
     else:
         src_img = pathlib.Path(src)
 
-    {
+    {  # pyright: ignore
         "JPEG": optimize_jpeg,
         "PNG": optimize_png,
         "GIF": optimize_gif,

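Many of the ignores in this file cluster around one pattern (an observation, with an assumed signature): dst comes in as an optional path and is rebound to an io.BytesIO when no path is given, so every later use of dst carries the "wrong" declared type. A hedged sketch of a Union-typed variant that would not need the suppressions; png_to_buffer_or_path and its signature are assumptions, not the library's optimize_png API:

    import io
    import pathlib
    from typing import Optional, Union

    from PIL import Image

    def png_to_buffer_or_path(
        src: Union[pathlib.Path, io.BytesIO],
        dst: Optional[Union[pathlib.Path, io.BytesIO]] = None,
    ) -> Union[pathlib.Path, io.BytesIO]:
        img = Image.open(src)
        # bind the fallback to a Union-typed name instead of re-typing dst
        out: Union[pathlib.Path, io.BytesIO] = dst if dst is not None else io.BytesIO()
        img.save(out, optimize=True, format="PNG")
        if isinstance(out, io.BytesIO):
            out.seek(0)
        return out
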
Diff for: src/zimscraperlib/image/probing.py

+5-3
@@ -47,7 +47,7 @@ def solarize(r: int, g: int, b: int) -> Tuple[int, int, int]:
 
 def is_hex_color(text: str) -> bool:
     """whether supplied text is a valid hex-formated color code"""
-    return re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", text)
+    return re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", text)  # pyright: ignore
 
 
 def format_for(
@@ -57,13 +57,15 @@ def format_for(
     """Pillow format of a given filename, either Pillow-detected or from suffix"""
     if not from_suffix:
         with PIL.Image.open(src) as img:
-            return img.format
+            return img.format  # pyright: ignore
 
     from PIL.Image import EXTENSION as ext_fmt_map  # noqa: N811
     from PIL.Image import init as init_pil
 
     init_pil()
-    return ext_fmt_map[src.suffix]  # might raise KeyError on unknown extension
+    return ext_fmt_map[
+        src.suffix  # pyright: ignore
+    ]  # might raise KeyError on unknown extension
 
 
 def is_valid_image(
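
The is_hex_color suppression is the clearest candidate for a later one-line removal: re.search() returns an Optional match object, not a bool. A hedged sketch of that eventual cleanup (not the committed change):

    import re

    def is_hex_color(text: str) -> bool:
        """whether supplied text is a valid hex-formatted color code"""
        return bool(re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", text))

    print(is_hex_color("#fa0"), is_hex_color("magenta"))  # True False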

Diff for: src/zimscraperlib/image/transformation.py

+7-2
@@ -25,7 +25,7 @@ def resize_image(
 
     methods: width, height, cover, thumbnail
     allow upscaling: upscale image first, preserving aspect ratio if required"""
-    with PIL.Image.open(src) as image:
+    with PIL.Image.open(src) as image:  # pyright: ignore
         # preserve image format as resize() does not transmit it into new object
         image_format = image.format
         image_mode = image.mode
@@ -58,4 +58,9 @@ def resize_image(
     if dst is None and isinstance(src, io.BytesIO):
         src.seek(0)
 
-    save_image(resized, dst if dst is not None else src, image_format, **params)
+    save_image(
+        resized,
+        dst if dst is not None else src,  # pyright: ignore
+        image_format,
+        **params,
+    )

Diff for: src/zimscraperlib/image/utils.py

+6-3
@@ -8,9 +8,12 @@
 
 
 def save_image(
-    src: Image, dst: pathlib.Path, fmt: Optional[str] = None, **params: Optional[dict]
+    src: Image,  # pyright: ignore
+    dst: pathlib.Path,
+    fmt: Optional[str] = None,
+    **params: Optional[dict],
 ) -> None:
     """PIL.Image.save() wrapper setting default parameters"""
-    args = {"JPEG": {"quality": 100}, "PNG": {}}.get(fmt, {})
+    args = {"JPEG": {"quality": 100}, "PNG": {}}.get(fmt, {})  # pyright: ignore
     args.update(params or {})
-    src.save(dst, fmt, **args)
+    src.save(dst, fmt, **args)  # pyright: ignore
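
One plausible cause for the annotation-side ignores here (an assumption about the imports, not something visible in the diff): if the name Image is bound to the PIL.Image module rather than the PIL.Image.Image class, pyright rejects it as a type annotation, and the Optional fmt also makes the dict .get(fmt, ...) lookup suspect. A hedged sketch of annotations that type-check cleanly; save_image_sketch is not this repository's function:

    import pathlib
    from typing import Optional

    from PIL import Image

    def save_image_sketch(
        src: Image.Image,  # the Image class, not the PIL.Image module
        dst: pathlib.Path,
        fmt: Optional[str] = None,
        **params,
    ) -> None:
        # fall back to "" so the dict lookup never sees None
        args: dict = {"JPEG": {"quality": 100}, "PNG": {}}.get(fmt or "", {})
        args.update(params or {})
        src.save(dst, fmt, **args)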

0 commit comments
