Commit edf9b73

Change legacy string formatting to f-strings (#1374)
1 parent 7036b5a · commit edf9b73

33 files changed: +124 -135 lines
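
This diff applies three recurring rewrites: %-interpolation and str.format() calls become f-strings, and string concatenation with + becomes a single interpolated literal. A minimal sketch of the equivalence, with invented values:

    name, n = "zip", 3

    old1 = "archive %s has %d members" % (name, n)      # legacy %-interpolation
    old2 = "archive {} has {} members".format(name, n)  # str.format
    new = f"archive {name} has {n} members"             # f-string, evaluated inline

    assert old1 == old2 == new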

fsspec/archive.py (+1 -1)

@@ -13,7 +13,7 @@ class AbstractArchiveFileSystem(AbstractFileSystem):
     """
 
     def __str__(self):
-        return "<Archive-like object %s at %s>" % (type(self).__name__, id(self))
+        return f"<Archive-like object {type(self).__name__} at {id(self)}>"
 
     __repr__ = __str__

fsspec/asyn.py (+1 -1)

@@ -426,7 +426,7 @@ async def _process_limits(self, url, start, end):
             end = ""
         if isinstance(end, numbers.Integral):
             end -= 1  # bytes range is inclusive
-        return "bytes=%s-%s" % (start, end)
+        return f"bytes={start}-{end}"
 
     async def _cat_file(self, path, start=None, end=None, **kwargs):
         raise NotImplementedError

fsspec/caching.py (+10 -10)

@@ -222,8 +222,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
         self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block)
 
     def __repr__(self):
-        return "<BlockCache blocksize={}, size={}, nblocks={}>".format(
-            self.blocksize, self.size, self.nblocks
+        return (
+            f"<BlockCache blocksize={self.blocksize}, "
+            f"size={self.size}, nblocks={self.nblocks}>"
         )
 
     def cache_info(self):
@@ -277,9 +278,8 @@ def _fetch_block(self, block_number):
         """
         if block_number > self.nblocks:
             raise ValueError(
-                "'block_number={}' is greater than the number of blocks ({})".format(
-                    block_number, self.nblocks
-                )
+                f"'block_number={block_number}' is greater than "
+                f"the number of blocks ({self.nblocks})"
             )
 
         start = block_number * self.blocksize
@@ -606,8 +606,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
         self._fetch_future_lock = threading.Lock()
 
     def __repr__(self):
-        return "<BackgroundBlockCache blocksize={}, size={}, nblocks={}>".format(
-            self.blocksize, self.size, self.nblocks
+        return (
+            f"<BackgroundBlockCache blocksize={self.blocksize}, "
+            f"size={self.size}, nblocks={self.nblocks}>"
         )
 
     def cache_info(self):
@@ -719,9 +720,8 @@ def _fetch_block(self, block_number, log_info="sync"):
         """
         if block_number > self.nblocks:
             raise ValueError(
-                "'block_number={}' is greater than the number of blocks ({})".format(
-                    block_number, self.nblocks
-                )
+                f"'block_number={block_number}' is greater than "
+                f"the number of blocks ({self.nblocks})"
             )
 
         start = block_number * self.blocksize
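
Where a single .format() call spanned multiple source lines, the new code wraps adjacent f-string literals in parentheses: Python joins neighbouring string literals at compile time, and each fragment needs its own f prefix to be interpolated. A small sketch with made-up values:

    blocksize, size, nblocks = 5, 100, 20

    msg = (
        f"<BlockCache blocksize={blocksize}, "
        f"size={size}, nblocks={nblocks}>"
    )
    assert msg == "<BlockCache blocksize=5, size=100, nblocks=20>"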

fsspec/compression.py (+2 -4)

@@ -39,13 +39,11 @@ def register_compression(name, callback, extensions, force=False):
 
     # Validate registration
     if name in compr and not force:
-        raise ValueError("Duplicate compression registration: %s" % name)
+        raise ValueError(f"Duplicate compression registration: {name}")
 
     for ext in extensions:
         if ext in fsspec.utils.compressions and not force:
-            raise ValueError(
-                "Duplicate compression file extension: %s (%s)" % (ext, name)
-            )
+            raise ValueError(f"Duplicate compression file extension: {ext} ({name})")
 
     compr[name] = callback

fsspec/core.py (+3 -3)

@@ -92,7 +92,7 @@ def __reduce__(self):
         )
 
     def __repr__(self):
-        return "<OpenFile '{}'>".format(self.path)
+        return f"<OpenFile '{self.path}'>"
 
     def __enter__(self):
         mode = self.mode.replace("t", "").replace("b", "") + "b"
@@ -195,7 +195,7 @@ def __getitem__(self, item):
         return out
 
     def __repr__(self):
-        return "<List of %s OpenFile instances>" % len(self)
+        return f"<List of {len(self)} OpenFile instances>"
 
 
 def open_files(
@@ -498,7 +498,7 @@ def get_compression(urlpath, compression):
     if compression == "infer":
         compression = infer_compression(urlpath)
     if compression is not None and compression not in compr:
-        raise ValueError("Compression type %s not supported" % compression)
+        raise ValueError(f"Compression type {compression} not supported")
     return compression

fsspec/fuse.py (+1 -1)

@@ -275,7 +275,7 @@ def format_help(self):
     for item in args.option or []:
         key, sep, value = item.partition("=")
         if not sep:
-            parser.error(message="Wrong option: {!r}".format(item))
+            parser.error(message=f"Wrong option: {item!r}")
         val = value.lower()
         if val.endswith("[int]"):
             value = int(value[: -len("[int]")])
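
The !r conversion used here applies repr() to the value, exactly as {!r} does in str.format(). For example:

    item = "foo bar"
    assert f"Wrong option: {item!r}" == "Wrong option: 'foo bar'"
    assert f"Wrong option: {item!r}" == "Wrong option: {!r}".format(item)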

fsspec/gui.py (+3 -3)

@@ -70,7 +70,7 @@ class which owns it.
         same name.
         """
         if name not in self.signals:
-            raise ValueError("Attempt to assign an undeclared signal: %s" % name)
+            raise ValueError(f"Attempt to assign an undeclared signal: {name}")
         self._sigs[name] = {
             "widget": widget,
             "callbacks": [],
@@ -141,7 +141,7 @@ def _emit(self, sig, value=None):
 
         Calling of callbacks will halt whenever one returns False.
         """
-        logger.log(self._sigs[sig]["log"], "{}: {}".format(sig, value))
+        logger.log(self._sigs[sig]["log"], f"{sig}: {value}")
         for callback in self._sigs[sig]["callbacks"]:
             if isinstance(callback, str):
                 self._emit(callback)
@@ -319,7 +319,7 @@ def fs(self):
     def urlpath(self):
         """URL of currently selected item"""
         return (
-            (self.protocol.value + "://" + self.main.value[0])
+            (f"{self.protocol.value}://{self.main.value[0]}")
             if self.main.value
             else None
         )

fsspec/implementations/cached.py (+11 -11)

@@ -304,10 +304,10 @@ def _open(
             hash, blocks = detail["fn"], detail["blocks"]
             if blocks is True:
                 # stored file is complete
-                logger.debug("Opening local copy of %s" % path)
+                logger.debug("Opening local copy of %s", path)
                 return open(fn, mode)
             # TODO: action where partial file exists in read-only cache
-            logger.debug("Opening partially cached copy of %s" % path)
+            logger.debug("Opening partially cached copy of %s", path)
         else:
             hash = self._mapper(path)
             fn = os.path.join(self.storage[-1], hash)
@@ -320,7 +320,7 @@ def _open(
                 "uid": self.fs.ukey(path),
             }
             self._metadata.update_file(path, detail)
-            logger.debug("Creating local sparse file for %s" % path)
+            logger.debug("Creating local sparse file for %s", path)
 
         # call target filesystems open
         self._mkcache()
@@ -343,9 +343,9 @@ def _open(
         if "blocksize" in detail:
             if detail["blocksize"] != f.blocksize:
                 raise BlocksizeMismatchError(
-                    "Cached file must be reopened with same block"
-                    "size as original (old: %i, new %i)"
-                    "" % (detail["blocksize"], f.blocksize)
+                    f"Cached file must be reopened with same block"
+                    f" size as original (old: {detail['blocksize']},"
+                    f" new {f.blocksize})"
                 )
         else:
             detail["blocksize"] = f.blocksize
@@ -570,7 +570,7 @@ def _make_local_details(self, path):
             "uid": self.fs.ukey(path),
         }
         self._metadata.update_file(path, detail)
-        logger.debug("Copying %s to local cache" % path)
+        logger.debug("Copying %s to local cache", path)
         return fn
 
     def cat(
@@ -627,7 +627,7 @@ def _open(self, path, mode="rb", **kwargs):
             detail, fn = detail
             _, blocks = detail["fn"], detail["blocks"]
             if blocks is True:
-                logger.debug("Opening local copy of %s" % path)
+                logger.debug("Opening local copy of %s", path)
 
                 # In order to support downstream filesystems to be able to
                 # infer the compression from the original filename, like
@@ -639,8 +639,8 @@ def _open(self, path, mode="rb", **kwargs):
                 return f
             else:
                 raise ValueError(
-                    "Attempt to open partially cached file %s"
-                    "as a wholly cached file" % path
+                    f"Attempt to open partially cached file {path}"
+                    f" as a wholly cached file"
                 )
         else:
             fn = self._make_local_details(path)
@@ -723,7 +723,7 @@ def _open(self, path, mode="rb", **kwargs):
 
         sha = self._mapper(path)
         fn = os.path.join(self.storage[-1], sha)
-        logger.debug("Copying %s to local cache" % path)
+        logger.debug("Copying %s to local cache", path)
         kwargs["mode"] = mode
 
         self._mkcache()
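
Note that the logger.debug() calls above keep %-style placeholders rather than becoming f-strings: the commit moves the path argument out of the format expression, so the message is only interpolated (by the logging machinery) when the record is actually emitted. A sketch of the distinction, with an invented logger name and path:

    import logging

    logger = logging.getLogger("example")
    path = "/tmp/data.bin"

    # eager: the full string is built even if DEBUG logging is off
    logger.debug("Opening local copy of %s" % path)

    # deferred: interpolation happens inside logging, only when needed
    logger.debug("Opening local copy of %s", path)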

fsspec/implementations/ftp.py (+4 -4)

@@ -156,7 +156,7 @@ def cb(x):
             outfile.write(x)
 
         self.ftp.retrbinary(
-            "RETR %s" % rpath,
+            f"RETR {rpath}",
             blocksize=self.blocksize,
             callback=cb,
         )
@@ -172,7 +172,7 @@ def cb(x):
             out.append(x)
 
         self.ftp.retrbinary(
-            "RETR %s" % path,
+            f"RETR {path}",
             blocksize=self.blocksize,
             rest=start,
             callback=cb,
@@ -321,7 +321,7 @@ def callback(x):
 
         try:
             self.fs.ftp.retrbinary(
-                "RETR %s" % self.path,
+                f"RETR {self.path}",
                 blocksize=self.blocksize,
                 rest=start,
                 callback=callback,
@@ -339,7 +339,7 @@ def callback(x):
     def _upload_chunk(self, final=False):
         self.buffer.seek(0)
         self.fs.ftp.storbinary(
-            "STOR " + self.path, self.buffer, blocksize=self.blocksize, rest=self.offset
+            f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset
         )
         return True

fsspec/implementations/git.py (+3 -3)

@@ -81,7 +81,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
                         "type": "directory",
                         "name": "/".join([path, obj.name]).lstrip("/"),
                         "hex": obj.hex,
-                        "mode": "%o" % obj.filemode,
+                        "mode": f"{obj.filemode:o}",
                         "size": 0,
                     }
                 )
@@ -91,7 +91,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
                         "type": "file",
                         "name": "/".join([path, obj.name]).lstrip("/"),
                         "hex": obj.hex,
-                        "mode": "%o" % obj.filemode,
+                        "mode": f"{obj.filemode:o}",
                         "size": obj.size,
                     }
                 )
@@ -102,7 +102,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
                 "type": "file",
                 "name": obj.name,
                 "hex": obj.hex,
-                "mode": "%o" % obj.filemode,
+                "mode": f"{obj.filemode:o}",
                 "size": obj.size,
             }
         ]
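
The "%o" → f"{...:o}" rewrite shows that printf-style type codes map directly onto f-string format specs after the colon. A quick check:

    filemode = 0o100644
    assert "%o" % filemode == f"{filemode:o}" == "100644"
    assert "%05d" % 42 == f"{42:05d}" == "00042"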

fsspec/implementations/github.py (+3 -7)

@@ -79,9 +79,7 @@ def repos(cls, org_or_user, is_org=True):
         List of string
         """
         r = requests.get(
-            "https://api.github.com/{part}/{org}/repos".format(
-                part=["users", "orgs"][is_org], org=org_or_user
-            )
+            f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
         )
         r.raise_for_status()
         return [repo["name"] for repo in r.json()]
@@ -90,8 +88,7 @@ def repos(cls, org_or_user, is_org=True):
     def tags(self):
         """Names of tags in the repo"""
         r = requests.get(
-            "https://api.github.com/repos/{org}/{repo}/tags"
-            "".format(org=self.org, repo=self.repo),
+            f"https://api.github.com/repos/{self.org}/{self.repo}/tags",
             **self.kw,
         )
         r.raise_for_status()
@@ -101,8 +98,7 @@ def tags(self):
     def branches(self):
         """Names of branches in the repo"""
         r = requests.get(
-            "https://api.github.com/repos/{org}/{repo}/branches"
-            "".format(org=self.org, repo=self.repo),
+            f"https://api.github.com/repos/{self.org}/{self.repo}/branches",
             **self.kw,
         )
         r.raise_for_status()
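
F-string braces accept arbitrary expressions, which is what lets the keyword indirection in the old .format() call be inlined here: ["users", "orgs"][is_org] indexes the list with a boolean (False is 0, True is 1). With example values:

    is_org = True
    org_or_user = "fsspec"
    url = f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
    assert url == "https://api.github.com/orgs/fsspec/repos"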

fsspec/implementations/http.py (+6 -6)

@@ -165,7 +165,7 @@ async def _ls_real(self, url, detail=True, **kwargs):
                 l = l[1]
             if l.startswith("/") and len(l) > 1:
                 # absolute URL on this server
-                l = parts.scheme + "://" + parts.netloc + l
+                l = f"{parts.scheme}://{parts.netloc}{l}"
             if l.startswith("http"):
                 if self.same_schema and l.startswith(url.rstrip("/") + "/"):
                     out.add(l)
@@ -655,8 +655,8 @@ async def async_fetch_range(self, start, end):
         logger.debug(f"Fetch range for {self}: {start}-{end}")
         kwargs = self.kwargs.copy()
         headers = kwargs.pop("headers", {}).copy()
-        headers["Range"] = "bytes=%i-%i" % (start, end - 1)
-        logger.debug(str(self.url) + " : " + headers["Range"])
+        headers["Range"] = f"bytes={start}-{end - 1}"
+        logger.debug(f"{self.url} : {headers['Range']}")
         r = await self.session.get(
             self.fs.encode_url(self.url), headers=headers, **kwargs
         )
@@ -812,7 +812,7 @@ async def get_range(session, url, start, end, file=None, **kwargs):
     # explicit get a range when we know it must be safe
     kwargs = kwargs.copy()
     headers = kwargs.pop("headers", {}).copy()
-    headers["Range"] = "bytes=%i-%i" % (start, end - 1)
+    headers["Range"] = f"bytes={start}-{end - 1}"
     r = await session.get(url, headers=headers, **kwargs)
     r.raise_for_status()
     async with r:
@@ -831,7 +831,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
     Default operation is to explicitly allow redirects and use encoding
     'identity' (no compression) to get the true size of the target.
     """
-    logger.debug("Retrieve file size for %s" % url)
+    logger.debug("Retrieve file size for %s", url)
     kwargs = kwargs.copy()
     ar = kwargs.pop("allow_redirects", True)
     head = kwargs.get("headers", {}).copy()
@@ -844,7 +844,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
     elif size_policy == "get":
         r = await session.get(url, allow_redirects=ar, **kwargs)
     else:
-        raise TypeError('size_policy must be "head" or "get", got %s' "" % size_policy)
+        raise TypeError(f'size_policy must be "head" or "get", got {size_policy}')
     async with r:
         r.raise_for_status()
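
Two details from these hunks: arithmetic such as end - 1 is evaluated inline, and the quotes around a dict key inside an f-string must differ from the string's own quotes (a restriction lifted only in Python 3.12+). With illustrative values:

    start, end = 0, 1024
    headers = {"Range": f"bytes={start}-{end - 1}"}
    assert headers["Range"] == "bytes=0-1023"

    # inner single quotes keep the outer double-quoted f-string intact
    assert f"{headers['Range']}" == "bytes=0-1023"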

fsspec/implementations/jupyter.py (+5 -5)

@@ -40,7 +40,7 @@ def __init__(self, url, tok=None, **kwargs):
 
     def ls(self, path, detail=True, **kwargs):
         path = self._strip_protocol(path)
-        r = self.session.get(self.url + "/" + path)
+        r = self.session.get(f"{self.url}/{path}")
         if r.status_code == 404:
             return FileNotFoundError(path)
         r.raise_for_status()
@@ -61,7 +61,7 @@ def ls(self, path, detail=True, **kwargs):
 
     def cat_file(self, path, start=None, end=None, **kwargs):
         path = self._strip_protocol(path)
-        r = self.session.get(self.url + "/" + path)
+        r = self.session.get(f"{self.url}/{path}")
         if r.status_code == 404:
             return FileNotFoundError(path)
         r.raise_for_status()
@@ -83,7 +83,7 @@ def pipe_file(self, path, value, **_):
             "format": "base64",
             "type": "file",
         }
-        self.session.put(self.url + "/" + path, json=json)
+        self.session.put(f"{self.url}/{path}", json=json)
 
     def mkdir(self, path, create_parents=True, **kwargs):
         path = self._strip_protocol(path)
@@ -96,11 +96,11 @@ def mkdir(self, path, create_parents=True, **kwargs):
             "content": None,
             "type": "directory",
         }
-        self.session.put(self.url + "/" + path, json=json)
+        self.session.put(f"{self.url}/{path}", json=json)
 
     def _rm(self, path):
         path = self._strip_protocol(path)
-        self.session.delete(self.url + "/" + path)
+        self.session.delete(f"{self.url}/{path}")
 
     def _open(self, path, mode="rb", **kwargs):
         path = self._strip_protocol(path)
