Skip to content

Commit 410bf74

Browse files
committed
MAINT: Make changes for deprecations
Use iloc for clarity with Series. Use setup_method.
1 parent fdd13b4 commit 410bf74

27 files changed

+133
-99
lines changed

setup.cfg

+1
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ filterwarnings =
7878
error:Parsing dates in:UserWarning
7979
error:A value is trying to be set on a copy::
8080
error:Conversion of an array with ndim:DeprecationWarning:
81+
error:Series.__getitem__ treating keys:FutureWarning:
8182
markers =
8283
example: mark a test that runs example code
8384
matplotlib: mark a test that requires matplotlib

statsmodels/base/tests/test_predict.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,8 @@ def test_1d(self):
3838

3939
pred = res.predict(data.iloc[:1])
4040
pdt.assert_index_equal(pred.index, data.iloc[:1].index)
41-
assert_allclose(pred.values, res.fittedvalues[0], rtol=1e-13)
41+
fv = np.asarray(res.fittedvalues)
42+
assert_allclose(pred.values, fv[0], rtol=1e-13)
4243

4344
fittedm = res.fittedvalues.mean()
4445
xmean = data.mean()
@@ -149,5 +150,5 @@ def test_predict_offset(self):
149150
pred = res.predict(data2, offset=data2['offset'])
150151
pdt.assert_index_equal(pred.index, fitted.index)
151152
fitted_nan = fitted.copy()
152-
fitted_nan[0] = np.nan
153+
fitted_nan.iloc[0] = np.nan
153154
assert_allclose(pred.values, fitted_nan.values, rtol=1e-13)

statsmodels/discrete/discrete_model.py

+4
Original file line numberDiff line numberDiff line change
@@ -2029,6 +2029,7 @@ def score(self, params):
20292029
return score
20302030

20312031
def score_factor(self, params, endog=None):
2032+
params = np.asarray(params)
20322033
if self._transparams:
20332034
alpha = np.exp(params[-1])
20342035
else:
@@ -2182,6 +2183,7 @@ def hessian_factor(self, params):
21822183
parameter.
21832184
21842185
"""
2186+
params = np.asarray(params)
21852187
if self._transparams:
21862188
alpha = np.exp(params[-1])
21872189
else:
@@ -4018,6 +4020,7 @@ def score_factor(self, params, endog=None):
40184020
The score vector of the model, i.e. the first derivative of the
40194021
loglikelihood function, evaluated at `params`
40204022
"""
4023+
params = np.asarray(params)
40214024
if self._transparams:
40224025
alpha = np.exp(params[-1])
40234026
else:
@@ -4135,6 +4138,7 @@ def hessian_factor(self, params):
41354138
hessian : ndarray, 2-D
41364139
The hessian matrix of the model.
41374140
"""
4141+
params = np.asarray(params)
41384142
if self._transparams:
41394143
alpha = np.exp(params[-1])
41404144
else:

statsmodels/discrete/tests/test_predict.py

+27-26
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,9 @@ def test_basic(self):
5353
sl2 = slice(0, -(self.k_infl + 1), None)
5454
assert_allclose(res1.params[sl1], res2.params[sl2], rtol=self.rtol)
5555
assert_allclose(res1.bse[sl1], res2.bse[sl2], rtol=30 * self.rtol)
56-
assert_allclose(res1.params[-1], np.exp(res2.params[-1]),
57-
rtol=self.rtol)
56+
params1 = np.asarray(res1.params)
57+
params2 = np.asarray(res2.params)
58+
assert_allclose(params1[-1], np.exp(params2[-1]), rtol=self.rtol)
5859

5960
def test_predict(self):
6061
res1 = self.res1
@@ -64,42 +65,42 @@ def test_predict(self):
6465
# test for which="mean"
6566
rdf = res2.results_margins_atmeans
6667
pred = res1.get_prediction(ex, **self.pred_kwds_mean)
67-
assert_allclose(pred.predicted, rdf["b"][0], rtol=1e-4)
68-
assert_allclose(pred.se, rdf["se"][0], rtol=1e-4, atol=1e-4)
68+
assert_allclose(pred.predicted, rdf["b"].iloc[0], rtol=1e-4)
69+
assert_allclose(pred.se, rdf["se"].iloc[0], rtol=1e-4, atol=1e-4)
6970
if isinstance(pred, PredictionResultsMonotonic):
7071
# default method is endpoint transformation for non-ZI models
7172
ci = pred.conf_int()[0]
72-
assert_allclose(ci[0], rdf["ll"][0], rtol=1e-3, atol=1e-4)
73-
assert_allclose(ci[1], rdf["ul"][0], rtol=1e-3, atol=1e-4)
73+
assert_allclose(ci[0], rdf["ll"].iloc[0], rtol=1e-3, atol=1e-4)
74+
assert_allclose(ci[1], rdf["ul"].iloc[0], rtol=1e-3, atol=1e-4)
7475

7576
ci = pred.conf_int(method="delta")[0]
76-
assert_allclose(ci[0], rdf["ll"][0], rtol=1e-4, atol=1e-4)
77-
assert_allclose(ci[1], rdf["ul"][0], rtol=1e-4, atol=1e-4)
77+
assert_allclose(ci[0], rdf["ll"].iloc[0], rtol=1e-4, atol=1e-4)
78+
assert_allclose(ci[1], rdf["ul"].iloc[0], rtol=1e-4, atol=1e-4)
7879
else:
7980
ci = pred.conf_int()[0]
80-
assert_allclose(ci[0], rdf["ll"][0], rtol=1e-4, atol=1e-4)
81-
assert_allclose(ci[1], rdf["ul"][0], rtol=1e-4, atol=1e-4)
81+
assert_allclose(ci[0], rdf["ll"].iloc[0], rtol=1e-4, atol=1e-4)
82+
assert_allclose(ci[1], rdf["ul"].iloc[0], rtol=1e-4, atol=1e-4)
8283

8384
stat, _ = pred.t_test()
8485
assert_allclose(stat, pred.tvalues, rtol=1e-4, atol=1e-4)
8586

8687
rdf = res2.results_margins_mean
8788
pred = res1.get_prediction(average=True, **self.pred_kwds_mean)
88-
assert_allclose(pred.predicted, rdf["b"][0], rtol=3e-4) # self.rtol)
89-
assert_allclose(pred.se, rdf["se"][0], rtol=3e-3, atol=1e-4)
89+
assert_allclose(pred.predicted, rdf["b"].iloc[0], rtol=3e-4) # self.rtol)
90+
assert_allclose(pred.se, rdf["se"].iloc[0], rtol=3e-3, atol=1e-4)
9091
if isinstance(pred, PredictionResultsMonotonic):
9192
# default method is endpoint transformation for non-ZI models
9293
ci = pred.conf_int()[0]
93-
assert_allclose(ci[0], rdf["ll"][0], rtol=1e-3, atol=1e-4)
94-
assert_allclose(ci[1], rdf["ul"][0], rtol=1e-3, atol=1e-4)
94+
assert_allclose(ci[0], rdf["ll"].iloc[0], rtol=1e-3, atol=1e-4)
95+
assert_allclose(ci[1], rdf["ul"].iloc[0], rtol=1e-3, atol=1e-4)
9596

9697
ci = pred.conf_int(method="delta")[0]
97-
assert_allclose(ci[0], rdf["ll"][0], rtol=1e-4, atol=1e-4)
98-
assert_allclose(ci[1], rdf["ul"][0], rtol=1e-4, atol=1e-4)
98+
assert_allclose(ci[0], rdf["ll"].iloc[0], rtol=1e-4, atol=1e-4)
99+
assert_allclose(ci[1], rdf["ul"].iloc[0], rtol=1e-4, atol=1e-4)
99100
else:
100101
ci = pred.conf_int()[0]
101-
assert_allclose(ci[0], rdf["ll"][0], rtol=5e-4, atol=1e-4)
102-
assert_allclose(ci[1], rdf["ul"][0], rtol=5e-4, atol=1e-4)
102+
assert_allclose(ci[0], rdf["ll"].iloc[0], rtol=5e-4, atol=1e-4)
103+
assert_allclose(ci[1], rdf["ul"].iloc[0], rtol=5e-4, atol=1e-4)
103104

104105
stat, _ = pred.t_test()
105106
assert_allclose(stat, pred.tvalues, rtol=1e-4, atol=1e-4)
@@ -108,25 +109,25 @@ def test_predict(self):
108109
rdf = res2.results_margins_atmeans
109110
pred = res1.get_prediction(ex, which="prob", y_values=np.arange(2),
110111
**self.pred_kwds_mean)
111-
assert_allclose(pred.predicted, rdf["b"][1:3], rtol=3e-4) # self.rtol)
112-
assert_allclose(pred.se, rdf["se"][1:3], rtol=3e-3, atol=1e-4)
112+
assert_allclose(pred.predicted, rdf["b"].iloc[1:3], rtol=3e-4) # self.rtol)
113+
assert_allclose(pred.se, rdf["se"].iloc[1:3], rtol=3e-3, atol=1e-4)
113114

114115
ci = pred.conf_int()
115-
assert_allclose(ci[:, 0], rdf["ll"][1:3], rtol=5e-4, atol=1e-4)
116-
assert_allclose(ci[:, 1], rdf["ul"][1:3], rtol=5e-4, atol=1e-4)
116+
assert_allclose(ci[:, 0], rdf["ll"].iloc[1:3], rtol=5e-4, atol=1e-4)
117+
assert_allclose(ci[:, 1], rdf["ul"].iloc[1:3], rtol=5e-4, atol=1e-4)
117118

118119
stat, _ = pred.t_test()
119120
assert_allclose(stat, pred.tvalues, rtol=1e-4, atol=1e-4)
120121

121122
rdf = res2.results_margins_mean
122123
pred = res1.get_prediction(which="prob", y_values=np.arange(2),
123124
average=True, **self.pred_kwds_mean)
124-
assert_allclose(pred.predicted, rdf["b"][1:3], rtol=5e-3) # self.rtol)
125-
assert_allclose(pred.se, rdf["se"][1:3], rtol=3e-3, atol=5e-4)
125+
assert_allclose(pred.predicted, rdf["b"].iloc[1:3], rtol=5e-3) # self.rtol)
126+
assert_allclose(pred.se, rdf["se"].iloc[1:3], rtol=3e-3, atol=5e-4)
126127

127128
ci = pred.conf_int()
128-
assert_allclose(ci[:, 0], rdf["ll"][1:3], rtol=5e-4, atol=1e-3)
129-
assert_allclose(ci[:, 1], rdf["ul"][1:3], rtol=5e-4, atol=5e-3)
129+
assert_allclose(ci[:, 0], rdf["ll"].iloc[1:3], rtol=5e-4, atol=1e-3)
130+
assert_allclose(ci[:, 1], rdf["ul"].iloc[1:3], rtol=5e-4, atol=5e-3)
130131

131132
stat, _ = pred.t_test()
132133
assert_allclose(stat, pred.tvalues, rtol=1e-4, atol=1e-4)

statsmodels/discrete/tests/test_truncated_model.py

+8-4
Original file line numberDiff line numberDiff line change
@@ -278,8 +278,10 @@ def test_offset(self):
278278
res1 = self.res1
279279
reso = self.res_offset
280280

281-
assert_allclose(reso.params[1:], res1.params[1:], rtol=1e-8)
282-
assert_allclose(reso.params[0], res1.params[0] - 1, rtol=1e-8)
281+
paramso = np.asarray(reso.params)
282+
params1 = np.asarray(res1.params)
283+
assert_allclose(paramso[1:], params1[1:], rtol=1e-8)
284+
assert_allclose(paramso[0], params1[0] - 1, rtol=1e-8)
283285
pred1 = res1.predict()
284286
predo = reso.predict()
285287
assert_allclose(predo, pred1, rtol=1e-8)
@@ -311,8 +313,10 @@ def test_offset(self):
311313
res1 = self.res1
312314
reso = self.res_offset
313315

314-
assert_allclose(reso.params[1:], res1.params[1:], rtol=1e-8)
315-
assert_allclose(reso.params[0], res1.params[0] - 1, rtol=1e-8)
316+
paramso = np.asarray(reso.params)
317+
params1 = np.asarray(res1.params)
318+
assert_allclose(paramso[1:], params1[1:], rtol=1e-8)
319+
assert_allclose(paramso[0], params1[0] - 1, rtol=1e-8)
316320
pred1 = res1.predict()
317321
predo = reso.predict()
318322
assert_allclose(predo, pred1, rtol=1e-8)

statsmodels/emplike/originregress.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -242,11 +242,11 @@ def conf_int_el(self, param_num, upper_bound=None,
242242
r0 = chi2.ppf(1 - sig, 1)
243243
param_num = np.array([param_num])
244244
if upper_bound is None:
245-
upper_bound = (np.squeeze(self.model.fit().
246-
conf_int(.0001)[param_num])[1])
245+
ci = np.asarray(self.model.fit().conf_int(.0001))
246+
upper_bound = (np.squeeze(ci[param_num])[1])
247247
if lower_bound is None:
248-
lower_bound = (np.squeeze(self.model.fit().conf_int(.00001)
249-
[param_num])[0])
248+
ci = np.asarray(self.model.fit().conf_int(.00001))
249+
lower_bound = (np.squeeze(ci[param_num])[0])
250250
f = lambda b0: self.el_test(np.array([b0]), param_num,
251251
method=method,
252252
stochastic_exog=stochastic_exog)[0] - r0

statsmodels/gam/smooth_basis.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -246,14 +246,18 @@ def get_covder2(smoother, k_points=4, integration_points=None,
246246
integral of the smoother derivative cross-product at knots plus k_points
247247
in between knots.
248248
"""
249-
from scipy.integrate import simps
249+
try:
250+
from scipy.integrate import simpson
251+
except ImportError:
252+
# Remove after SciPy 1.7 is the minimum version
253+
from scipy.integrate import simps as simpson
250254
knots = smoother.knots
251255
x = _get_integration_points(knots, k_points=3)
252256
if integration_points is None:
253257
d2 = smoother.transform(x, deriv=deriv, skip_ctransf=skip_ctransf)
254258
else:
255259
x = integration_points
256-
covd2 = simps(d2[:, :, None] * d2[:, None, :], x, axis=0)
260+
covd2 = simpson(d2[:, :, None] * d2[:, None, :], x, axis=0)
257261
return covd2
258262

259263

statsmodels/genmod/generalized_estimating_equations.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -1748,8 +1748,12 @@ def qic(self, params, scale, cov_params, n_step=1000):
17481748
qv[i] = -np.sum(du**2 * (g + 1) / vu)
17491749
qv /= (4 * scale)
17501750

1751-
from scipy.integrate import trapz
1752-
ql = trapz(qv, dx=xv[1] - xv[0])
1751+
try:
1752+
from scipy.integrate import trapezoid
1753+
except ImportError:
1754+
# Remove after minimum is SciPy 1.7
1755+
from scipy.integrate import trapz as trapezoid
1756+
ql = trapezoid(qv, dx=xv[1] - xv[0])
17531757

17541758
qicu = -2 * ql + 2 * self.exog.shape[1]
17551759
qic = -2 * ql + 2 * np.trace(np.dot(omega, cov_params))

statsmodels/genmod/tests/test_gee.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -1327,7 +1327,7 @@ def test_predict(self):
13271327
x2_new = np.random.normal(size=10)
13281328
new_exog = pd.DataFrame({"X1": x1_new, "X2": x2_new})
13291329
pred6 = result.predict(exog=new_exog)
1330-
params = result.params
1330+
params = np.asarray(result.params)
13311331
pred6_correct = params[0] + params[1] * x1_new + params[2] * x2_new
13321332
assert_allclose(pred6, pred6_correct)
13331333

@@ -1530,7 +1530,7 @@ def test_sensitivity(self):
15301530
[0.0, 0.5])
15311531

15321532
# Regression test
1533-
assert_almost_equal([x.params[0] for x in ps],
1533+
assert_almost_equal([np.asarray(x.params)[0] for x in ps],
15341534
[0.1696214707458818, 0.17836097387799127])
15351535

15361536
def test_equivalence(self):

statsmodels/graphics/regressionplots.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -427,7 +427,7 @@ def plot_partregress(endog, exog_i, exog_others, data=None,
427427
ax.plot(xaxis_resid, yaxis_resid, 'o', **kwargs)
428428
fitted_line = OLS(yaxis_resid, xaxis_resid).fit()
429429

430-
fig = abline_plot(0, fitted_line.params[0], color='k', ax=ax)
430+
fig = abline_plot(0, np.asarray(fitted_line.params)[0], color='k', ax=ax)
431431

432432
if x_axis_endog_name == 'y': # for no names regression will just get a y
433433
x_axis_endog_name = 'x' # this is misleading, so use x

statsmodels/iolib/summary.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -423,11 +423,11 @@ def summary_params(results, yname=None, xname=None, alpha=.05, use_t=True,
423423
#we need to give parameter alpha to conf_int
424424
results, params, std_err, tvalues, pvalues, conf_int = results
425425
else:
426-
params = results.params
427-
std_err = results.bse
428-
tvalues = results.tvalues # is this sometimes called zvalues
429-
pvalues = results.pvalues
430-
conf_int = results.conf_int(alpha)
426+
params = np.asarray(results.params)
427+
std_err = np.asarray(results.bse)
428+
tvalues = np.asarray(results.tvalues) # is this sometimes called zvalues
429+
pvalues = np.asarray(results.pvalues)
430+
conf_int = np.asarray(results.conf_int(alpha))
431431
if params.size == 0:
432432
return SimpleTable([['No Model Parameters']])
433433
# Dictionary to store the header names for the parameter part of the

statsmodels/miscmodels/tests/test_ordinal_model.py

+9-5
Original file line numberDiff line numberDiff line change
@@ -307,8 +307,10 @@ def test_offset(self):
307307
distr='probit')
308308
resf2 = modf2.fit(method='bfgs', disp=False)
309309

310-
assert_allclose(resf2.params[:3], resp.params[:3], atol=2e-4)
311-
assert_allclose(resf2.params[3], resp.params[3] + 1, atol=2e-4)
310+
resf2_params = np.asarray(resf2.params)
311+
resp_params = np.asarray(resp.params)
312+
assert_allclose(resf2_params[:3], resp_params[:3], atol=2e-4)
313+
assert_allclose(resf2_params[3], resp_params[3] + 1, atol=2e-4)
312314

313315
fitted = resp.predict()
314316
fitted2 = resf2.predict()
@@ -328,7 +330,7 @@ def test_offset(self):
328330
assert_allclose(pred_zero1, pred_zero, atol=2e-4)
329331

330332
params_adj = resp.params.copy()
331-
params_adj[3] += 1
333+
params_adj.iloc[3] += 1
332334
fitted_zero = resp.model.predict(params_adj)
333335
assert_allclose(pred_zero1, fitted_zero[:6], atol=2e-4)
334336

@@ -464,8 +466,10 @@ def test_attributes(self):
464466

465467
attributes = "bse df_resid llf aic bic llnull".split()
466468
attributes += "llnull llr llr_pvalue prsquared".split()
467-
assert_allclose(resp.params[:3], res_logit.params[:3], rtol=1e-5)
468-
assert_allclose(resp.params[3], -res_logit.params[3], rtol=1e-5)
469+
params = np.asarray(resp.params)
470+
logit_params = np.asarray(res_logit.params)
471+
assert_allclose(params[:3], logit_params[:3], rtol=1e-5)
472+
assert_allclose(params[3], -logit_params[3], rtol=1e-5)
469473
for attr in attributes:
470474
assert_allclose(getattr(resp, attr), getattr(res_logit, attr),
471475
rtol=1e-4)

statsmodels/regression/tests/test_recursive_ls.py

+6-4
Original file line numberDiff line numberDiff line change
@@ -435,8 +435,9 @@ def test_constraints_stata():
435435

436436
# See tests/results/test_rls.do
437437
desired = [.4699552366, .0005369357, .0005369357]
438-
assert_allclose(res.bse[0], desired[0], atol=1e-1)
439-
assert_allclose(res.bse[1:], desired[1:], atol=1e-4)
438+
bse = np.asarray(res.bse)
439+
assert_allclose(bse[0], desired[0], atol=1e-1)
440+
assert_allclose(bse[1:], desired[1:], atol=1e-4)
440441

441442
# See tests/results/test_rls.do
442443
desired = -534.4292052931121
@@ -469,8 +470,9 @@ def test_multiple_constraints():
469470

470471
# See tests/results/test_rls.do
471472
desired = [.4699552366, .0005369357, .0005369357, 0]
472-
assert_allclose(res.bse[0], desired[0], atol=1e-1)
473-
assert_allclose(res.bse[1:-1], desired[1:-1], atol=1e-4)
473+
bse = np.asarray(res.bse)
474+
assert_allclose(bse[0], desired[0], atol=1e-1)
475+
assert_allclose(bse[1:-1], desired[1:-1], atol=1e-4)
474476

475477
# See tests/results/test_rls.do
476478
desired = -534.4292052931121

statsmodels/tools/_testing.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,8 @@ def check_ttest_tvalues(results):
6969

7070
tt = res.t_test(mat[0])
7171
tt.summary() # smoke test for #1323
72-
assert_allclose(tt.pvalue, res.pvalues[0], rtol=5e-10)
72+
pvalues = np.asarray(res.pvalues)
73+
assert_allclose(tt.pvalue, pvalues[0], rtol=5e-10)
7374
# TODO: Adapt more of test_generic_methods.test_ttest_values here?
7475

7576

statsmodels/tools/numdiff.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ def _get_epsilon(x, s, epsilon, n):
103103
if h.shape != x.shape:
104104
raise ValueError("If h is not a scalar it must have the same"
105105
" shape as x.")
106-
return h
106+
return np.asarray(h)
107107

108108

109109
def approx_fprime(x, f, epsilon=None, args=(), kwargs={}, centered=False):

statsmodels/tools/tests/test_tools.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -193,22 +193,22 @@ def test_pandas_const_series():
193193
series = dta.exog["GNP"]
194194
series = tools.add_constant(series, prepend=False)
195195
assert_string_equal("const", series.columns[1])
196-
assert_equal(series.var(0)[1], 0)
196+
assert_equal(series.var(0).iloc[1], 0)
197197

198198

199199
def test_pandas_const_series_prepend():
200200
dta = longley.load_pandas()
201201
series = dta.exog["GNP"]
202202
series = tools.add_constant(series, prepend=True)
203203
assert_string_equal("const", series.columns[0])
204-
assert_equal(series.var(0)[0], 0)
204+
assert_equal(series.var(0).iloc[0], 0)
205205

206206

207207
def test_pandas_const_df():
208208
dta = longley.load_pandas().exog
209209
dta = tools.add_constant(dta, prepend=False)
210210
assert_string_equal("const", dta.columns[-1])
211-
assert_equal(dta.var(0)[-1], 0)
211+
assert_equal(dta.var(0).iloc[-1], 0)
212212

213213

214214
def test_pandas_const_df_prepend():
@@ -217,7 +217,7 @@ def test_pandas_const_df_prepend():
217217
dta["UNEMP"] /= dta["UNEMP"].std()
218218
dta = tools.add_constant(dta, prepend=True)
219219
assert_string_equal("const", dta.columns[0])
220-
assert_equal(dta.var(0)[0], 0)
220+
assert_equal(dta.var(0).iloc[0], 0)
221221

222222

223223
class TestNanDot:

0 commit comments

Comments
 (0)