#!/usr/bin/env python
##############################################################################
#
# SrMise by Luke Granlund
# (c) 2014 trustees of the Michigan State University
# (c) 2024 trustees of Columbia University in the City of New York
# All rights reserved.
#
# File coded by: Luke Granlund
#
# See LICENSE.txt for license information.
#
##############################################################################
import logging
import numpy as np
import scipy.fftpack as fp
from diffpy.srmise.peaks.base import PeakFunction
logger = logging.getLogger("diffpy.srmise")
class TerminationRipples(PeakFunction):
"""Methods for evaluation and parameter estimation of a peak function with termination ripples."""
def __init__(self, base, qmax, extension=4.0, supersample=5.0, Cache=None):
"""Peak function which adds termination ripples to existing function.
Unlike other peak functions, TerminationRipples can only be evaluated
over a uniform grid, or at a single value using an ad hoc uniform grid
defined by qmax, extension, and supersample.
Parameters
base: Instance of PeakFunction subclass.
qmax: Cut-off frequency in reciprocal space.
extension: How many multiples of 2pi/qmax to extend calculations in
order to avoid edge effects.
        supersample: Number of intervals per 2pi/qmax when a natural interval
                     cannot be determined while extending calculations.
Cache: A class (not instance) which implements caching of PeakFunction
evaluations."""
parameterdict = base.parameterdict
formats = base.parformats
default_formats = base.default_formats
self.base = base
self.qmax = qmax
self.extension = extension
self.supersample = supersample
metadict = {}
metadict["qmax"] = (qmax, repr)
metadict["extension"] = (extension, repr)
metadict["supersample"] = (supersample, repr)
PeakFunction.__init__(self, parameterdict, formats, default_formats, metadict, base, Cache)
return
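    # Illustrative usage sketch (not part of the class API; the names below
    # are taken from the test code at the end of this module):
    #
    #   from diffpy.srmise.peaks.gaussianoverr import GaussianOverR
    #   base = GaussianOverR(0.7)
    #   ripples = TerminationRipples(base, qmax=20.0)
    #   peak = ripples.createpeak([3.0, 0.2, 10], "pwa")  # position, width, area
    #   r = np.arange(2, 4, 0.01)
    #   y = ripples.value(peak, r)  # rippled profile on the requested grid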
# Methods required by PeakFunction ####
# TODO: A smart way to convert from the basefunctions estimate to an
# appropriate one when ripples are considered. This may not be necessary,
# though.
def estimate_parameters(self, r, y):
"""Estimate parameters for single peak from data provided.
Uses estimation routine provided by base peak function.
Parameters
r: (Numpy array) Data along r from which to estimate
y: (Numpy array) Data along y from which to estimate
Returns Numpy array of parameters in the default internal format.
Raises SrMiseEstimationError if parameters cannot be estimated for any
reason."""
return self.base.estimate_parameters(r, y)
# TODO: Can this be implemented sanely for termination ripples?
def scale_at(self, pars, x, scale):
"""Change parameters so value(x)->scale*value(x) for the base function.
Does not change position or height of peak's maxima. Raises
SrMiseScalingError if the parameters cannot be scaled.
Parameters
pars: (Array) Parameters corresponding to a single peak
x: (float) Position of the border
scale: (float > 0) Size of scaling at x."""
return self.base.scale_at(pars, x, scale)
def _jacobianraw(self, pars, r, free):
"""Return Jacobian of base function with termination ripples.
Parameters
pars: Sequence of parameters for a single peak
r: sequence or scalar over which pars is evaluated
free: sequence of booleans which determines which derivatives are
needed. True for evaluation, False for no evaluation."""
return self.base._jacobianraw(pars, r, free)
def _transform_derivativesraw(self, pars, in_format, out_format):
"""Return gradient matrix for the pars converted from in_format to out_format.
Parameters
pars: Sequence of parameters
in_format: A format defined for base peak function
out_format: A format defined for base peak function"""
return self.base._transform_derivativesraw(pars, in_format, out_format)
def _transform_parametersraw(self, pars, in_format, out_format):
"""Convert parameter values from in_format to out_format.
Parameters
pars: Sequence of parameters
in_format: A format defined for base peak function
out_format: A format defined for base peak function"""
return self.base._transform_parametersraw(pars, in_format, out_format)
def _valueraw(self, pars, r):
"""Return value of base peak function for the given parameters and r values.
pars: Sequence of parameters for a single peak
r: sequence or scalar over which pars is evaluated"""
return self.base._valueraw(pars, r)
# Overridden PeakFunction functions ####
# jacobian() and value() are not normally overridden by PeakFunction
# subclasses, but are here to minimize the effect of edge-effects while
# introducing termination ripples.
def jacobian(self, peak, r, rng=None):
"""Calculate (rippled) jacobian, possibly restricted by range.
peak: The Peak to be evaluated
r: sequence or scalar over which peak is evaluated
rng: Optional slice object restricts which r-values are evaluated.
The output has same length as r, but unevaluated objects have
a default value of 0. If caching is enabled these may be
previously calculated values instead."""
if self is not peak._owner:
raise ValueError(
"Argument 'peak' must be evaluated by the "
"PeakFunction subclass instance with which "
"it is associated."
)
# normally r will be a sequence, but also allow single numeric values
try:
if len(r) > 1:
dr = (r[-1] - r[0]) / (len(r) - 1)
else:
# dr is ad hoc if r is a single point
dr = 2 * np.pi / (self.supersample * self.qmax)
if rng is None:
rng = slice(0, len(r))
rpart = r[rng]
(ext_r, ext_slice) = self.extend_grid(rpart, dr)
jac = self._jacobianraw(peak.pars, ext_r, peak.free)
output = [None for j in jac]
for idx in range(len(output)):
if jac[idx] is not None:
jac[idx] = self.cut_freq(jac[idx], dr)
output[idx] = r * 0.0
output[idx][rng] = jac[idx][ext_slice]
return output
except TypeError:
# dr is ad hoc if r is a single point.
dr = 2 * np.pi / (self.supersample * self.qmax)
(ext_r, ext_slice) = self.extend_grid(np.array([r]), dr)
jac = self._jacobianraw(peak.pars, ext_r, peak.free)
            for idx in range(len(jac)):
if jac[idx] is not None:
jac[idx] = self.cut_freq(jac[idx], dr)[ext_slice][0]
return jac
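    # Note: the Jacobian gets the same treatment as value() below. Each
    # partial derivative returned by the base function is evaluated on an
    # extended grid, low-pass filtered with cut_freq(), and trimmed back to
    # the requested r values, so the derivatives stay consistent with the
    # rippled profile.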
def value(self, peak, r, rng=None):
"""Calculate (rippled) value of peak, possibly restricted by range.
This function overrides its counterpart in PeakFunction in order
to minimize the impact of edge-effects from introducing termination
ripples into an existing peak function.
peak: The Peak to be evaluated
r: sequence or scalar over which peak is evaluated
rng: Optional slice object restricts which r-values are evaluated.
The output has same length as r, but unevaluated objects have
a default value of 0. If caching is enabled these may be
previously calculated values instead.
"""
if self is not peak._owner:
raise ValueError(
"Argument 'peak' must be evaluated by the "
"PeakFunction subclass instance with which "
"it is associated."
)
# normally r will be a sequence, but also allow single numeric values
dr_super = 2 * np.pi / (self.supersample * self.qmax)
if np.isscalar(r):
# dr is ad hoc if r is a single point.
(ext_r, ext_slice) = self.extend_grid(np.array([r]), dr_super)
value = self._valueraw(peak.pars, ext_r)
value = self.cut_freq(value, dr_super)
return value[ext_slice][0]
else:
if rng is None:
rng = slice(0, len(r))
output = r * 0.0
# Make sure the actual dr used for finding termination ripples
# is at least as fine as dr_super, while still calculating the
# function at precisely the requested points.
# When the underlying function is sampled too coarsely it can
# miss critical high frequency components and return a very
# poor approximation to the continuous case. The actual fineness
# of sampling needed to avoid the worst of these discretization
# issues is difficult to determine without detailed knowledge
# of the underlying function.
dr = (r[-1] - r[0]) / (len(r) - 1)
            segments = int(np.ceil(dr / dr_super))
dr_segmented = dr / segments
rpart = r[rng]
if segments > 1:
rpart = np.arange(rpart[0], rpart[-1] + dr_segmented / 2, dr_segmented)
(ext_r, ext_slice) = self.extend_grid(rpart, dr_segmented)
value = self._valueraw(peak.pars, ext_r)
value = self.cut_freq(value, dr_segmented)
output[rng] = value[ext_slice][::segments]
return output
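    # Numerical sketch of the resampling above, using assumed values for
    # illustration: with qmax = 20.0 and supersample = 5.0 the target spacing
    # is dr_super = 2*pi/(5*20) ~ 0.063.  A requested grid with dr = 0.1 is
    # too coarse, so segments = ceil(0.1/0.063) = 2 and the profile is
    # evaluated with spacing dr/2 = 0.05; every second point of the filtered
    # result is then returned, so the output lines up exactly with the
    # requested r values.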
def getmodule(self):
return __name__
# Other methods ####
def cut_freq(self, sequence, delta):
"""Remove high-frequency components from sequence.
        This is equivalent to the discrete convolution of the signal with the
        sinc function sin(qmax*r)/(pi*r), i.e. an ideal low-pass filter with
        cut-off qmax.
Parameters
sequence: (numpy array) The sequence to alter.
delta: The spacing between elements in sequence."""
padlen = int(2 ** np.ceil(np.log2(len(sequence))))
padseq = fp.fft(sequence, padlen)
dq = 2 * np.pi / ((padlen - 1) * delta)
lowidx = int(np.ceil(self.qmax / dq))
hiidx = padlen + 1 - lowidx
# Remove hi-frequency components
padseq[lowidx:hiidx] = 0
padseq = fp.ifft(padseq)
return np.real(padseq[0 : len(sequence)])
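    # Sketch of the index arithmetic above, with assumed sizes for
    # illustration: a 1000-point sequence is zero-padded to padlen = 1024, and
    # the FFT bins are spaced dq = 2*pi/((padlen - 1)*delta) apart.  Bins
    # lowidx through hiidx - 1 hold the components with |q| >= qmax (the upper
    # half of the spectrum carries the negative frequencies), so zeroing that
    # band and inverse transforming keeps only frequencies below the qmax
    # cut-off.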
def extend_grid(self, r, dr):
"""Return (extended r, slice giving original range)."""
ext = self.extension * 2 * np.pi / self.qmax
left_ext = np.arange(r[0] - dr, max(0.0, r[0] - ext - dr), -dr)[::-1]
right_ext = np.arange(r[-1] + dr, r[-1] + ext + dr, dr)
ext_r = np.concatenate((left_ext, r, right_ext))
ext_slice = slice(len(left_ext), len(ext_r) - len(right_ext))
return (ext_r, ext_slice)
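    # Numerical sketch, with assumed values for illustration: for qmax = 20.0
    # and extension = 4.0 the grid is padded by ext = 4*2*pi/20 ~ 1.26 on each
    # side, so a request for r in [2, 4] is internally evaluated on roughly
    # [0.74, 5.26] and ext_slice picks the original [2, 4] portion back out.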
# end of class TerminationRipples
# simple test code
if __name__ == "__main__":
import matplotlib.pyplot as plt
from numpy.random import randn
from diffpy.srmise.modelcluster import ModelCluster
from diffpy.srmise.modelevaluators import AICc
from diffpy.srmise.peaks import Peaks
from diffpy.srmise.peaks.gaussianoverr import GaussianOverR
res = 0.01
r = np.arange(2, 4, res)
err = np.ones(len(r)) # default unknown errors
pf1 = GaussianOverR(0.7)
pf2 = TerminationRipples(pf1, 20.0)
evaluator = AICc()
pars = [[3, 0.2, 10], [3.5, 0.2, 10]]
ideal_peaks = Peaks([pf1.createpeak(p, "pwa") for p in pars])
ripple_peaks = Peaks([pf2.createpeak(p, "pwa") for p in pars])
y_ideal = ideal_peaks.value(r)
y_ripple = ripple_peaks.value(r) + 0.1 * randn(len(r))
guesspars = [[2.7, 0.15, 5], [3.7, 0.3, 5]]
guess_peaks = Peaks([pf2.createpeak(p, "pwa") for p in guesspars])
cluster = ModelCluster(guess_peaks, r, y_ripple, err, None, AICc, [pf2])
qual1 = cluster.quality()
print(qual1.stat)
cluster.fit()
yfit = cluster.calc()
qual2 = cluster.quality()
print(qual2.stat)
plt.figure(1)
plt.plot(r, y_ideal, r, y_ripple, r, yfit)
plt.show()