#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''SrFit example of a simple linear fit to noisy data.
This script can be run in IPython "demo" mode. To use the demo mode,
start IPython and execute the following commands:
In [1]: %run LinearFit.py demo
In [2]: demo()
...
In [3]: demo()
...
'''
# Define "demo" object and exit if run with a single argument "demo".
from __future__ import print_function
import sys
if __name__ == '__main__' and sys.argv[1:] == ['demo']:
from IPython.lib.demo import ClearDemo
demo = ClearDemo(__file__)
demo.seek(1)
print('Created "demo" object. Use "demo()" to run the next section.')
sys.exit()
# <demo> auto_all
# <demo> silent
# <demo> --- stop ---
# Simulate linear data with some random Gaussian noise.
import numpy as np
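# Optional: seed NumPy's random generator so the simulated noise is
# reproducible across runs (the seed value 42 is arbitrary).
np.random.seed(42)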
xobs = np.arange(-10, 10.1)
dyobs = 0.3 * np.ones_like(xobs)
yobs = 0.5 * xobs + 3 + dyobs * np.random.randn(xobs.size)
# Plot the generated "observed" data (xobs, yobs).
import matplotlib.pyplot as plt
plt.ion(); plt.clf()
plt.plot(xobs, yobs, 'x')
plt.title('y = 0.5*x + 3 generated with a normal noise at sigma=0.3')
plt.show()
# <demo> --- stop ---
# We are going to define a line fitting regression using SrFit.
# At first we create a SrFit Profile object that holds the observed data.
from diffpy.srfit.fitbase import Profile
linedata = Profile()
linedata.setObservedProfile(xobs, yobs, dyobs)
# The second step is to create a FitContribution object, which associates
# the observed profile with a mathematical model for the dependent variable.
from diffpy.srfit.fitbase import FitContribution
linefit = FitContribution('linefit')
linefit.setProfile(linedata)
linefit.setEquation("A * x + B")
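# Note: SrFit's equation parser also registers common NumPy functions by
# name, so an expression such as "A * sin(x) + B" would be accepted too
# (illustrative only; not executed here).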
# SrFit objects can be examined by calling their show() function. SrFit
# parses the model equation and finds two parameters A, B at independent
# variable x. The values of parameters A, B are at this stage undefined.
linefit.show()
# <demo> --- stop ---
# We can set A and B to some specific values and calculate model
# observations. The x and y attributes of the FitContribution are
# the observed values, which may be re-sampled or truncated to a shorter
# fitting range.
linefit.A
linefit.A = 3
linefit.B = 5
print(linefit.A, linefit.A.value)
print(linefit.B, linefit.B.value)
# <demo> --- stop ---
# linefit.evaluate() returns the modeled values and linefit.residual()
# the difference between the observed and modeled data, scaled by the
# estimated standard deviations.
print("linefit.evaluate() =", linefit.evaluate())
print("linefit.residual() =", linefit.residual())
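# As a quick sanity check (illustrative, assuming the (ycalc - y) / dy
# scaling described above), the residual can be reproduced by hand:
print(np.allclose(linefit.residual(), (linefit.evaluate() - yobs) / dyobs))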
plt.plot(xobs, yobs, 'x', linedata.x, linefit.evaluate(), '-')
plt.title('Line simulated at A=3, B=5')
# <demo> --- stop ---
# We want to find optimum model parameters that fit the simulated curve
# to the observations. This is done by associating FitContribution with
# a FitRecipe object. FitRecipe can manage multiple fit contributions and
# optimize all models to fit their respective profiles.
from diffpy.srfit.fitbase import FitRecipe
rec = FitRecipe()
# clearFitHooks suppresses printout of iteration number
rec.clearFitHooks()
rec.addContribution(linefit)
rec.show()
# <demo> --- stop ---
# FitContributions may have many parameters. We need to tell the recipe
# which of them should be tuned by the fit.
rec.addVar(rec.linefit.A)
rec.addVar(rec.linefit.B)
# The addVar function created two attributes A, B for the rec object,
# which link to the A and B parameters of the linefit contribution.
print("rec.A =", rec.A)
print("rec.A.value =", rec.A.value)
# The names of the declared variables are stored in rec.names
# and the corresponding values in rec.values.
print("rec.values =", rec.values)
print("rec.names =", rec.names)
# Finally, the recipe object provides a residual() function that calculates
# the difference between the observed and simulated values. The residual
# function can also accept a list of new variable values, in the same order
# as rec.names.
print("rec.residual() =", rec.residual())
print("rec.residual([2, 4]) =", rec.residual([2, 4]))
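# For illustration, the scalar quantity minimized by least squares is the
# sum of squared residuals, which we can compute directly:
print("sum of squares =", np.sum(rec.residual() ** 2))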
# <demo> --- stop ---
# The FitRecipe.residual function can be directly used with the scipy
# leastsq function for minimizing a sum of squares.
from scipy.optimize import leastsq
leastsq(rec.residual, rec.values)
# Recipe variables and the linked line-function parameters are set to the
# new optimized values.
print(rec.names, "-->", rec.values)
linefit.show()
# The calculated function is available in the ycalc attribute of the profile.
# It can also be accessed through the "linefit" contribution attribute of the
# recipe as "rec.linefit.profile.ycalc".
plt.plot(linedata.x, linedata.y, 'x', linedata.x, linedata.ycalc, '-')
plt.title('Line fit using the leastsq least-squares optimizer')
# <demo> --- stop ---
# The FitRecipe.scalarResidual function returns the sum of squares and can
# be used with a minimizer that expects a scalar function:
from scipy.optimize import fmin
fmin(rec.scalarResidual, [1, 1])
print(rec.names, "-->", rec.values)
plt.plot(linedata.x, linedata.y, 'x', linedata.x, linedata.ycalc, '-')
plt.title('Line fit using the fmin scalar optimizer')
# <demo> --- stop ---
# For a converged fit recipe, the details of the fit can be extracted
# with the FitResults class.
from diffpy.srfit.fitbase import FitResults
res = FitResults(rec)
print(res)
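# FitResults can also write this summary to a file; a minimal sketch,
# assuming the saveResults method and a hypothetical output filename:
# res.saveResults('linefit-results.txt')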
# <demo> --- stop ---
# Variables defined in the recipe can be fixed to a constant value.
rec.fix(B=0)
# The fixed variables can be checked using the "fixednames" and
# "fixedvalues" attributes of a recipe.
print("free:", rec.names, "-->", rec.values)
print("fixed:", rec.fixednames, "-->", rec.fixedvalues)
# The fit can be rerun with a constant variable B.
leastsq(rec.residual, rec.values)
print(FitResults(rec))
plt.plot(linedata.x, linedata.y, 'x', linedata.x, linedata.ycalc, '-')
plt.title('Line fit for variable B fixed to B=0')
# <demo> --- stop ---
# Fixed variables may be released with the "free" function.
# free("all") releases all fixed variables.
rec.free('all')
# Variables may be constrained to a result of an expression.
rec.constrain(rec.A, "2 * B")
# Perform a linear fit where the slope is twice the offset.
leastsq(rec.residual, rec.values)
print(FitResults(rec))
plt.plot(linedata.x, linedata.y, 'x', linedata.x, linedata.ycalc, '-')
plt.title('Line fit for variable A constrained to A = 2*B')
# <demo> --- stop ---
# Constraint expressions can be removed by calling the unconstrain function.
rec.unconstrain(rec.A)
# Variables may be restrained to a specific range. Here "ub" is the upper
# bound and "sig" acts as a standard deviation in the ((x - ub)/sig)**2
# penalty function.
arst = rec.restrain(rec.A, ub=0.2, sig=0.001)
# Perform a fit with the line slope restrained to a maximum value of 0.2:
leastsq(rec.residual, rec.values)
print(FitResults(rec))
plt.plot(linedata.x, linedata.y, 'x', linedata.x, linedata.ycalc, '-')
plt.title('Line fit with A restrained to an upper bound of 0.2')
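# The restraint object captured in "arst" above can be removed again;
# a minimal sketch, assuming the FitRecipe.unrestrain method:
# rec.unrestrain(arst)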