Commit 5776026: More fixes
Parent: a0b0d20

12 files changed: +69 -46 lines

adaptive/types.py (+1 -1)

@@ -8,7 +8,7 @@
 else:
     from typing_extensions import TypeAlias

-Float: TypeAlias = Union[float, np.float_]
+Float: TypeAlias = Union[float, np.float64]
 Bool: TypeAlias = Union[bool, np.bool_]
 Int: TypeAlias = Union[int, np.int_]
 Real: TypeAlias = Union[Float, Int]
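This is the one behavioral fix in the commit; the remaining hunks are formatting. `np.float_` was an alias of `np.float64` and was removed in NumPy 2.0, so naming `np.float64` directly keeps the alias working on both NumPy 1.x and 2.x. A minimal sketch of the updated alias in isolation (assuming Python >= 3.10 for `typing.TypeAlias`):

import numpy as np
from typing import TypeAlias, Union

Float: TypeAlias = Union[float, np.float64]

def halve(x: Float) -> Float:
    # Accepts plain Python floats and NumPy float64 scalars alike.
    return x / 2

print(halve(1.0), halve(np.float64(3.0)))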

docs/source/algorithms_and_examples.md (+5 -2)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -217,7 +217,10 @@ scatter = fig.data[0]
 coords_col = [
     (x, y, z, color)
     for x, y, z, color in zip(
-        scatter["x"], scatter["y"], scatter["z"], scatter.marker["color"]
+        scatter["x"],
+        scatter["y"],
+        scatter["z"],
+        scatter.marker["color"],
     )
     if not (x > 0 and y > 0)
 ]
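Nearly all remaining hunks repeat the pattern above: the last argument gains a trailing comma and the call is split to one argument per line. This matches the "magic trailing comma" convention of Black-style formatters (an assumption; the commit does not name its tooling): once a trailing comma is present, the formatter keeps the arguments exploded rather than rejoining them onto one line. A small sketch:

# With the trailing comma after the last argument, a Black-style formatter
# keeps this call at one argument per line instead of collapsing it.
pairs = list(
    zip(
        [1, 2, 3],
        [4, 5, 6],
    ),
)
print(pairs)  # [(1, 4), (2, 5), (3, 6)]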

docs/source/benchmarks.md (+6 -5)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: adaptive
   language: python
@@ -138,7 +138,7 @@ def run_and_plot(learner, **goal):
     bms[learner.function.__name__] = bm
     display(pd.DataFrame([bm]))  # noqa: F821
     return plot(learner, homo_learner).relabel(
-        f"{learner.function.__name__} function with {learner.npoints} points"
+        f"{learner.function.__name__} function with {learner.npoints} points",
     )


@@ -207,7 +207,7 @@ Nonetheless, the algorithm still focuses on areas of the function that have more
 ```{code-cell} ipython3
 def gaussian(x, mu=0, sigma=0.5):
     return (1 / np.sqrt(2 * np.pi * sigma**2)) * np.exp(
-        -((x - mu) ** 2) / (2 * sigma**2)
+        -((x - mu) ** 2) / (2 * sigma**2),
     )


@@ -359,7 +359,7 @@ def gaussian_surface(xy, mu=(0, 0), sigma=(1, 1)):
     mu_x, mu_y = mu
     sigma_x, sigma_y = sigma
     return (1 / (2 * np.pi * sigma_x * sigma_y)) * np.exp(
-        -((x - mu_x) ** 2 / (2 * sigma_x**2) + (y - mu_y) ** 2 / (2 * sigma_y**2))
+        -((x - mu_x) ** 2 / (2 * sigma_x**2) + (y - mu_y) ** 2 / (2 * sigma_y**2)),
     )


@@ -380,7 +380,8 @@ def sinusoidal_surface(xy, amplitude=1, frequency=(0.3, 3)):


 learner = adaptive.Learner2D(
-    sinusoidal_surface, bounds=[(-2 * np.pi, 2 * np.pi), (-2 * np.pi, 2 * np.pi)]
+    sinusoidal_surface,
+    bounds=[(-2 * np.pi, 2 * np.pi), (-2 * np.pi, 2 * np.pi)],
 )
 run_and_plot(learner, loss_goal=0.01)
 ```

docs/source/logo.md (+2 -4)

@@ -4,13 +4,11 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: Python 3 (ipykernel)
   language: python
   name: python3
-execution:
-  timeout: 300
 ---

 ```{code-cell} ipython3
@@ -197,7 +195,7 @@ def save_webm(fname, fnames):
         "-y",
         fname,
     ]
-    return subprocess.run(args, capture_output=True)
+    return subprocess.run(args, capture_output=True, check=False)


 if __name__ == "__main__":
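Adding `check=False` does not change behavior: `subprocess.run` already defaults to not raising on a non-zero exit status. Making it explicit silences linters that flag an implicit `check` argument (for example Ruff's PLW1510; which linter prompted this is an assumption). A sketch of the two styles, using a placeholder `echo` command rather than the ffmpeg invocation built above:

import subprocess

# check=False (the default, now spelled out): inspect returncode yourself.
result = subprocess.run(["echo", "hello"], capture_output=True, check=False)
if result.returncode != 0:
    print("command failed:", result.stderr)

# check=True would instead raise CalledProcessError on failure.
subprocess.run(["echo", "hello"], check=True)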

docs/source/tutorial/tutorial.BalancingLearner.md (+5 -2)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -88,7 +88,10 @@ combos = {
 }

 learner = adaptive.BalancingLearner.from_product(
-    jacobi, adaptive.Learner1D, {"bounds": (0, 1)}, combos
+    jacobi,
+    adaptive.Learner1D,
+    {"bounds": (0, 1)},
+    combos,
 )

 runner = adaptive.BlockingRunner(learner, loss_goal=0.01)

docs/source/tutorial/tutorial.IntegratorLearner.md (+3 -3)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.7
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -80,13 +80,13 @@ runner is done.
 ```{code-cell} ipython3
 if not runner.task.done():
     raise RuntimeError(
-        "Wait for the runner to finish before executing the cells below!"
+        "Wait for the runner to finish before executing the cells below!",
     )
 ```

 ```{code-cell} ipython3
 print(
-    f"The integral value is {learner.igral} with the corresponding error of {learner.err}"
+    f"The integral value is {learner.igral} with the corresponding error of {learner.err}",
 )
 learner.plot()
 ```

docs/source/tutorial/tutorial.Learner1D.md (+5 -4)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -92,7 +92,7 @@ We can now compare the adaptive sampling to a homogeneous sampling with the same
 ```{code-cell} ipython3
 if not runner.task.done():
     raise RuntimeError(
-        "Wait for the runner to finish before executing the cells below!"
+        "Wait for the runner to finish before executing the cells below!",
     )
 ```

@@ -119,7 +119,7 @@ offsets = [random.uniform(-0.8, 0.8) for _ in range(3)]
 def f_levels(x, offsets=offsets):
     a = 0.01
     return np.array(
-        [offset + x + a**2 / (a**2 + (x - offset) ** 2) for offset in offsets]
+        [offset + x + a**2 / (a**2 + (x - offset) ** 2) for offset in offsets],
     )
 ```

@@ -129,7 +129,8 @@ The `Learner1D` can be used for such functions:
 ```{code-cell} ipython3
 learner = adaptive.Learner1D(f_levels, bounds=(-1, 1))
 runner = adaptive.Runner(
-    learner, loss_goal=0.01
+    learner,
+    loss_goal=0.01,
 )  # continue until `learner.loss()<=0.01`
 ```

docs/source/tutorial/tutorial.LearnerND.md (+4 -2)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -88,7 +88,9 @@ def plot_cut(x1, x2, directions, learner=learner):

 dm = hv.DynamicMap(plot_cut, kdims=["v1", "v2", "directions"])
 dm = dm.redim.values(
-    v1=np.linspace(-1, 1, 6), v2=np.linspace(-1, 1, 6), directions=["xy", "xz", "yz"]
+    v1=np.linspace(-1, 1, 6),
+    v2=np.linspace(-1, 1, 6),
+    directions=["xy", "xz", "yz"],
 )

 # In a notebook one would run `dm` however we want a statically generated

docs/source/tutorial/tutorial.advanced-topics.md (+7 -5)

@@ -4,11 +4,12 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
 ---
+
 (TutorialAdvancedTopics)=
 # Advanced Topics

@@ -92,7 +93,8 @@ def slow_f(x):
 learner = adaptive.Learner1D(slow_f, bounds=[0, 1])
 runner = adaptive.Runner(learner, npoints_goal=100)
 runner.start_periodic_saving(
-    save_kwargs={"fname": "data/periodic_example.p"}, interval=6
+    save_kwargs={"fname": "data/periodic_example.p"},
+    interval=6,
 )
 ```

@@ -241,7 +243,7 @@ def will_raise(x):

 learner = adaptive.Learner1D(will_raise, (-1, 1))
 runner = adaptive.Runner(
-    learner
+    learner,
 )  # without 'goal' the runner will run forever unless cancelled
 ```

@@ -365,6 +367,7 @@ await runner.task  # This is not needed in a notebook environment!
 # The result will only be set when the runner is done.
 timer.result()
 ```
+
 (CustomParallelization)=
 ## Custom parallelization using coroutines

@@ -378,8 +381,7 @@ We will focus on a function `f(x)` that consists of two distinct components: a s

 ```{code-cell} ipython3
 def f(x):  # example function without caching
-    """
-    Integer part of `x` repeats and should be reused
+    """Integer part of `x` repeats and should be reused
     Decimal part requires a new computation
     """
     return g(int(x)) + h(x % 1)
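The docstring edit above (mirrored in tutorial.custom_loss.md and example-notebook.ipynb below) moves the summary onto the line that opens the triple quotes and gives the closing quotes their own line, matching common docstring lint conventions (for example pydocstyle's D212 and D209; naming the rules is an assumption, since the commit does not cite them). In sketch form:

def before(x):
    """
    Old style: the summary began on the second line.
    More detail here.
    """
    return x

def after(x):
    """New style: the summary starts right after the opening quotes.
    More detail here.
    """
    return x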

docs/source/tutorial/tutorial.custom_loss.md (+15 -8)

@@ -4,7 +4,7 @@ jupytext:
     extension: .md
     format_name: myst
     format_version: 0.13
-    jupytext_version: 1.14.5
+    jupytext_version: 1.16.1
 kernelspec:
   display_name: python3
   name: python3
@@ -72,7 +72,9 @@ def f_divergent_1d(x):


 learner = adaptive.Learner1D(
-    f_divergent_1d, (-1, 1), loss_per_interval=uniform_sampling_1d
+    f_divergent_1d,
+    (-1, 1),
+    loss_per_interval=uniform_sampling_1d,
 )
 runner = adaptive.BlockingRunner(learner, loss_goal=0.01)
 learner.plot().select(y=(0, 10000))
@@ -92,12 +94,15 @@ def f_divergent_2d(xy):


 learner = adaptive.Learner2D(
-    f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=uniform_sampling_2d
+    f_divergent_2d,
+    [(-1, 1), (-1, 1)],
+    loss_per_triangle=uniform_sampling_2d,
 )

 # this takes a while, so use the async Runner so we know *something* is happening
 runner = adaptive.Runner(
-    learner, goal=lambda lrn: lrn.loss() < 0.03 or lrn.npoints > 1000
+    learner,
+    goal=lambda lrn: lrn.loss() < 0.03 or lrn.npoints > 1000,
 )
 ```

@@ -134,7 +139,8 @@ After all subdomains are appropriately small it will prioritise places where the
 ```{code-cell} ipython3
 def resolution_loss_function(min_distance=0, max_distance=1):
     """min_distance and max_distance should be in between 0 and 1
-    because the total area is normalized to 1."""
+    because the total area is normalized to 1.
+    """

     def resolution_loss(ip):
         from adaptive.learner.learner2D import areas, default_loss
@@ -143,10 +149,10 @@ def resolution_loss_function(min_distance=0, max_distance=1):

         A = areas(ip)
         # Setting areas with a small area to zero such that they won't be chosen again
-        loss[A < min_distance**2] = 0
+        loss[min_distance**2 > A] = 0

         # Setting triangles that have a size larger than max_distance to infinite loss
-        loss[A > max_distance**2] = np.inf
+        loss[max_distance**2 < A] = np.inf

         return loss

@@ -158,7 +164,8 @@ loss = resolution_loss_function(min_distance=0.01)
 learner = adaptive.Learner2D(f_divergent_2d, [(-1, 1), (-1, 1)], loss_per_triangle=loss)
 runner = adaptive.BlockingRunner(learner, loss_goal=0.02)
 learner.plot(tri_alpha=0.3).relabel("1 / (x^2 + y^2) in log scale").opts(
-    hv.opts.EdgePaths(color="w"), hv.opts.Image(logz=True, colorbar=True)
+    hv.opts.EdgePaths(color="w"),
+    hv.opts.Image(logz=True, colorbar=True),
 )
 ```

example-notebook.ipynb (+15 -9)

@@ -490,7 +490,7 @@
     "\n",
     "print(\n",
     "    f\"The integral value is {learner.igral} \"\n",
-    "    f\"with a corresponding error of {learner.err}\"\n",
+    "    f\"with a corresponding error of {learner.err}\",\n",
     ")\n",
     "learner.plot()"
    ]
@@ -683,7 +683,9 @@
     "\n",
     "\n",
     "learner = adaptive.Learner1D(\n",
-    "    f_divergent_1d, (-1, 1), loss_per_interval=uniform_sampling_1d\n",
+    "    f_divergent_1d,\n",
+    "    (-1, 1),\n",
+    "    loss_per_interval=uniform_sampling_1d,\n",
     ")\n",
     "runner = adaptive.BlockingRunner(learner, loss_goal=0.01)\n",
     "learner.plot().select(y=(0, 10000))"
@@ -755,8 +757,8 @@
    "source": [
     "def resolution_loss(ip, min_distance=0, max_distance=1):\n",
     "    \"\"\"min_distance and max_distance should be in between 0 and 1\n",
-    "    because the total area is normalized to 1.\"\"\"\n",
-    "\n",
+    "    because the total area is normalized to 1.\n",
+    "    \"\"\"\n",
     "    from adaptive.learner.learner2D import areas, deviations\n",
     "\n",
     "    A = areas(ip)\n",
@@ -773,10 +775,10 @@
     "    loss = np.sqrt(A) * dev + A\n",
     "\n",
     "    # Setting areas with a small area to zero such that they won't be chosen again\n",
-    "    loss[A < min_distance**2] = 0\n",
+    "    loss[min_distance**2 > A] = 0\n",
     "\n",
     "    # Setting triangles that have a size larger than max_distance to infinite loss\n",
-    "    loss[A > max_distance**2] = np.inf\n",
+    "    loss[max_distance**2 < A] = np.inf\n",
     "\n",
     "    return loss\n",
     "\n",
@@ -874,7 +876,10 @@
     "}\n",
     "\n",
     "learner = adaptive.BalancingLearner.from_product(\n",
-    "    jacobi, adaptive.Learner1D, {\"bounds\": (0, 1)}, combos\n",
+    "    jacobi,\n",
+    "    adaptive.Learner1D,\n",
+    "    {\"bounds\": (0, 1)},\n",
+    "    combos,\n",
     ")\n",
     "\n",
     "runner = adaptive.BlockingRunner(learner, loss_goal=0.01)\n",
@@ -1249,7 +1254,8 @@
     "runner = adaptive.Runner(learner, npoints_goal=100)\n",
     "\n",
     "runner.start_periodic_saving(\n",
-    "    save_kwargs={\"fname\": \"data/periodic_example.p\"}, interval=6\n",
+    "    save_kwargs={\"fname\": \"data/periodic_example.p\"},\n",
+    "    interval=6,\n",
     ")\n",
     "\n",
     "runner.live_info()"
@@ -1487,7 +1493,7 @@
     "\n",
     "learner = adaptive.Learner1D(will_raise, (-1, 1))\n",
     "runner = adaptive.Runner(\n",
-    "    learner\n",
+    "    learner,\n",
     ")  # without 'goal' the runner will run forever unless cancelled\n",
     "runner.live_info()\n",
     "runner.live_plot()"

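One pair of edits above looks substantive but is not: loss[A < min_distance**2] = 0 became loss[min_distance**2 > A] = 0, and loss[A > max_distance**2] = np.inf became loss[max_distance**2 < A] = np.inf, both here and in tutorial.custom_loss.md. The flipped comparisons select exactly the same elements, as a quick check confirms:

import numpy as np

A = np.array([0.00005, 0.5, 2.0])
min_distance, max_distance = 0.01, 1.0

# a < b and b > a are the same elementwise comparison, just written mirrored.
assert np.array_equal(A < min_distance**2, min_distance**2 > A)
assert np.array_equal(A > max_distance**2, max_distance**2 < A)
print("equivalent")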