Skip to content

Commit aabeafb

Browse files
authored
Merge pull request #19 from fact-project/calc_sensitivity
Add a function that calculates the sensitivity in energy bins, with uncertainties estimated via bootstrapping
2 parents 4df0bbd + 226a173 commit aabeafb

File tree

1 file changed

+92
-1
lines changed

1 file changed

+92
-1
lines changed

irf/sensitivity.py

+92-1
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@
33
from fact.analysis import li_ma_significance
44
from scipy.optimize import newton
55
import warnings
6+
import pandas as pd
7+
from scipy.stats import norm
68

79

810
@u.quantity_input(t_obs=u.hour, t_ref=u.hour)
@@ -11,7 +13,7 @@ def relative_sensitivity(
1113
n_off,
1214
alpha,
1315
t_obs,
14-
t_ref=50*u.hour,
16+
t_ref=u.Quantity(50, u.hour),
1517
target_significance=5,
1618
significance_function=li_ma_significance,
1719
initial_guess=0.5,
@@ -99,3 +101,92 @@ def equation(relative_flux):
99101
'significance_function',
100102
]
101103
)
104+
105+
106+
@u.quantity_input()
def calculate_sensitivity(
    df,
    theta2_cut,
    e_min: u.TeV,
    e_max: u.TeV,
    n_bins,
    t_obs: u.hour,
    t_ref: u.hour,
    n_bootstrap=100,
    min_events=5,
):
    '''
    Calculate the relative sensitivity in logarithmic bins of estimated
    energy, estimating uncertainties by bootstrapping the event sample.

    Parameters
    ----------
    df: pd.DataFrame
        Event table. Must provide the columns ``gamma_energy_prediction``
        (in GeV, to match the binning below — TODO confirm against caller),
        ``theta_deg`` and ``theta_deg_off_1`` … ``theta_deg_off_5``.
        An optional ``weight`` column is used for the weighted counts;
        if absent, every event gets weight 1.
    theta2_cut: float
        Cut on the squared angular distance θ² defining the on/off regions.
    e_min: u.Quantity[energy]
        Lower edge of the regular energy binning.
    e_max: u.Quantity[energy]
        Upper edge of the regular energy binning.
    n_bins: int
        Number of regular logarithmic energy bins. One underflow and one
        overflow bin are added, so the result has ``n_bins + 2`` rows.
    t_obs: u.Quantity[time]
        Observation time the event sample corresponds to.
    t_ref: u.Quantity[time]
        Reference observation time the sensitivity is scaled to.
    n_bootstrap: int
        Number of bootstrap iterations.
    min_events: int
        Minimum number of unweighted on and off events required for a bin
        to be considered valid; invalid bins get ``relative_sensitivity``
        set to NaN.

    Returns
    -------
    pd.DataFrame
        One row per energy bin (index name ``bin``) with the bin edges,
        the medians of the event counts and relative sensitivity over the
        bootstrap iterations, and their ±1 σ quantile uncertainties
        (columns suffixed ``_uncertainty_low`` / ``_uncertainty_high``).
    '''
    theta_cut = np.sqrt(theta2_cut)

    def select_on(events):
        # On region: events reconstructed close to the source position.
        return events.theta_deg <= theta_cut

    def select_off(events):
        # Off region: union of the five off positions.
        mask = events['theta_deg_off_1'] <= theta_cut
        for i in range(2, 6):
            mask |= events[f'theta_deg_off_{i}'] <= theta_cut
        return mask

    # Work on a copy so the caller's dataframe is not modified.
    df = df.copy()
    if 'weight' not in df.columns:
        df['weight'] = 1

    # Logarithmic binning in GeV plus under-/overflow bins.
    # e_min / u.GeV is dimensionless, so np.log10 is well defined.
    bin_edges = np.logspace(np.log10(e_min / u.GeV), np.log10(e_max / u.GeV), n_bins + 1)
    bin_edges = np.append(-np.inf, np.append(bin_edges, np.inf))
    # np.digitize numbers bins starting at 1, hence the +1.
    bin_ids = np.arange(n_bins + 2) + 1

    df['bin'] = np.digitize(df['gamma_energy_prediction'].values, bin_edges)

    sensitivity = pd.DataFrame(index=bin_ids)
    sensitivity['e_low'] = bin_edges[:-1]
    sensitivity['e_high'] = bin_edges[1:]
    sensitivity['e_center'] = 0.5 * (sensitivity['e_low'] + sensitivity['e_high'])
    sensitivity['e_width'] = np.diff(bin_edges)
    # Initialize the counts so bins that receive no events in a
    # bootstrap sample stay at 0 instead of NaN.
    sensitivity['n_on_weighted'] = 0
    sensitivity['n_off_weighted'] = 0
    sensitivity['n_on'] = 0
    sensitivity['n_off'] = 0
    sensitivity.index.name = 'bin'

    sensitivities = []
    for iteration in range(n_bootstrap):
        cur_sensitivity = sensitivity.copy()

        # Resample the events with replacement.
        sampled = df.sample(len(df), replace=True)
        # Loop variable renamed (was `bin_id`) to avoid shadowing the
        # bin index array defined above.
        for b, g in sampled.groupby('bin'):
            on = select_on(g)
            off = select_off(g)

            # Each bin appears exactly once per groupby, so plain
            # assignment is correct for all four counts (the original
            # mixed `=` and `+=`, which was equivalent but confusing).
            cur_sensitivity.loc[b, 'n_on_weighted'] = g.loc[on, 'weight'].sum()
            cur_sensitivity.loc[b, 'n_off_weighted'] = g.loc[off, 'weight'].sum()
            cur_sensitivity.loc[b, 'n_on'] = on.sum()
            cur_sensitivity.loc[b, 'n_off'] = off.sum()

        # alpha = 0.2 matches the five off regions used in select_off.
        cur_sensitivity['relative_sensitivity'] = relative_sensitivity(
            cur_sensitivity['n_on_weighted'],
            cur_sensitivity['n_off_weighted'],
            alpha=0.2,
            t_obs=t_obs,
            # Bug fix: t_ref was accepted by this function but never
            # forwarded, so relative_sensitivity silently used its
            # own default instead of the caller's value.
            t_ref=t_ref,
        )
        cur_sensitivity['iteration'] = iteration

        sensitivities.append(cur_sensitivity.reset_index())

    sensitivities = pd.concat(sensitivities)

    # Aggregate the bootstrap samples: median plus the quantiles
    # corresponding to ±1 σ of a normal distribution.
    grouped = sensitivities.groupby('bin')
    keys = ['n_on', 'n_off', 'n_on_weighted', 'n_off_weighted', 'relative_sensitivity']
    for key in keys:
        sensitivity[key] = grouped[key].median()
        sensitivity[key + '_uncertainty_low'] = grouped[key].quantile(norm.cdf(-1))
        sensitivity[key + '_uncertainty_high'] = grouped[key].quantile(norm.cdf(1))
    sensitivity['count'] = grouped['relative_sensitivity'].count()

    # Invalidate bins with too few events or where more than 5 % of the
    # bootstrap iterations produced no sensitivity value.
    invalid = (
        (sensitivity['n_on'] < min_events)
        | (sensitivity['n_off'] < min_events)
        | (sensitivity['count'] / n_bootstrap <= 0.95)
    )
    sensitivity.loc[invalid, 'relative_sensitivity'] = np.nan

    return sensitivity

0 commit comments

Comments
 (0)