@@ -375,17 +375,21 @@ def update_param_values_detector(
     """
     Modification of the update_param_values function to use with the Detectors class.
     """
+    assert hypo_maker.__class__.__name__ == "Detectors", "hypo_maker is not Detectors class"
+
+    if isinstance(params, Param): params = ParamSet(params)
+
     for distribution_maker in hypo_maker:
-        update_param_values(distribution_maker, params)
-
-    if isinstance(params, Param): params = ParamSet(params) # just for the following
-
-    for p in params.names: # now update params with det_names inside
-        for i, det_name in enumerate(hypo_maker.det_names):
-            if det_name in p:
-                cp = deepcopy(params[p])
-                cp.name = cp.name.replace('_' + det_name, "")
-                update_param_values(hypo_maker._distribution_makers[i], cp)
+        ps = deepcopy(params)
+        for p in ps.names:
+            if distribution_maker.detector_name in p:
+                p_name = p.replace('_' + distribution_maker.detector_name, "")
+                if p_name in ps.names:
+                    ps.remove(p_name)
+                ps[p].name = p_name
+        update_param_values(distribution_maker, ps,
+                            update_nominal_values, update_range, update_is_fixed)
+    hypo_maker.init_params()

 # TODO: move this to a central location prob. in utils
 class Counter(object):
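
The rewritten loop gives detector-suffixed parameters precedence over shared ones: for each distribution maker, a name like `dom_eff_IC86` displaces any shared `dom_eff` and is passed down under the shared name, after which `hypo_maker.init_params()` rebuilds the combined parameter set. A minimal sketch of the renaming rule, using plain strings rather than PISA `Param`/`ParamSet` objects (detector and parameter names are made up for illustration):

from copy import deepcopy

def strip_detector_suffix(param_names, detector_name):
    """Parameter names a single detector's distribution maker should
    receive: detector-suffixed params override the shared ones."""
    names = deepcopy(param_names)
    for name in list(names):
        if detector_name in name:
            shared = name.replace('_' + detector_name, "")
            if shared in names:
                names.remove(shared)  # the suffixed param wins
            names[names.index(name)] = shared  # expose under the shared name
    return names

# Two detectors share "aeff_scale"; "dom_eff" has an IC86-specific override:
print(strip_detector_suffix(["aeff_scale", "dom_eff_IC86", "dom_eff"], "IC86"))
# -> ['aeff_scale', 'dom_eff']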
@@ -1166,7 +1170,10 @@ def _fit_octants(self, data_dist, hypo_maker, metric, external_priors_penalty,
         # Copy the fitted parameter values from the best fit case into the hypo maker's
         # parameter values.
         # Also reinstate the original parameter range for the angle
-        update_param_values(hypo_maker, best_fit_info.params.free, update_range=True)
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(hypo_maker, best_fit_info.params.free, update_range=True)
+        else:
+            update_param_values(hypo_maker, best_fit_info.params.free, update_range=True)

         return best_fit_info

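This `Detectors` dispatch recurs verbatim in every fit routine below (`_fit_grid_scan`, `_fit_ranges`, `_fit_staged` and the minimizer backends). A helper along these lines could centralize it; a hypothetical refactoring sketch, not part of this commit:

def _update_params_dispatch(hypo_maker, params, **kwargs):
    """Route to the Detectors-aware updater when the hypo maker is a
    Detectors instance, otherwise fall back to update_param_values; the
    keyword flags (update_nominal_values, update_range, update_is_fixed)
    are forwarded unchanged."""
    if hypo_maker.__class__.__name__ == "Detectors":
        update_param_values_detector(hypo_maker, params, **kwargs)
    else:
        update_param_values(hypo_maker, params, **kwargs)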
@@ -1310,7 +1317,10 @@ def _fit_grid_scan(self, data_dist, hypo_maker, metric,
                 mod_param.is_fixed = True
                 # It is important not to use hypo_maker.update_params(mod_param) here
                 # because we don't want to overwrite the memory reference!
-                update_param_values(hypo_maker, mod_param, update_is_fixed=True)
+                if hypo_maker.__class__.__name__ == "Detectors":
+                    update_param_values_detector(hypo_maker, mod_param, update_is_fixed=True)
+                else:
+                    update_param_values(hypo_maker, mod_param, update_is_fixed=True)
             new_fit_info = self.fit_recursively(
                 data_dist, hypo_maker, metric, external_priors_penalty,
                 local_fit_kwargs["method"], local_fit_kwargs["method_kwargs"],
@@ -1338,7 +1348,10 @@ def _fit_grid_scan(self, data_dist, hypo_maker, metric,
         best_fit_result = all_fit_results[best_idx]

         if do_refined_fit:
-            update_param_values(hypo_maker, best_fit_result.params.free)
+            if hypo_maker.__class__.__name__ == "Detectors":
+                update_param_values_detector(hypo_maker, best_fit_result.params.free)
+            else:
+                update_param_values(hypo_maker, best_fit_result.params.free)
             # the params stored in the best fit may come from a grid point where
             # parameters were fixed, so we free them up again
             for param in originally_free:
@@ -1507,9 +1520,14 @@ def _fit_ranges(self, data_dist, hypo_maker, metric,
             logging.info(f"parameter with modified range:\n{mod_param}")
             # use update_param_values instead of hypo_maker.update_params so that we
             # don't overwrite the internal memory reference
-            update_param_values(
-                hypo_maker, mod_param, update_range=True, update_nominal_values=True
-            )
+            if hypo_maker.__class__.__name__ == "Detectors":
+                update_param_values_detector(
+                    hypo_maker, mod_param, update_range=True, update_nominal_values=True
+                )
+            else:
+                update_param_values(
+                    hypo_maker, mod_param, update_range=True, update_nominal_values=True
+                )
             fit_result = self.fit_recursively(
                 data_dist, hypo_maker, metric, external_priors_penalty,
                 local_fit_kwargs["method"], local_fit_kwargs["method_kwargs"],
@@ -1536,10 +1554,16 @@ def _fit_ranges(self, data_dist, hypo_maker, metric,
         # set the values of all parameters in the hypo_maker to the best fit values
         # without overwriting the memory reference.
         # Also reset ranges and nominal values that we might have changed above!
-        update_param_values(
-            hypo_maker, best_fit_result.params.free,
-            update_range=True, update_nominal_values=True
-        )
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(
+                hypo_maker, best_fit_result.params.free,
+                update_range=True, update_nominal_values=True
+            )
+        else:
+            update_param_values(
+                hypo_maker, best_fit_result.params.free,
+                update_range=True, update_nominal_values=True
+            )
         return best_fit_result

     def _fit_staged(self, data_dist, hypo_maker, metric,
@@ -1565,9 +1589,14 @@ def _fit_staged(self, data_dist, hypo_maker, metric,
         for i, fit_kwargs in enumerate(local_fit_kwargs):
             logging.info(f"Beginning fit {i+1}/{len(local_fit_kwargs)}")
             if best_fit_params is not None:
-                update_param_values(
-                    hypo_maker, best_fit_params.free, update_nominal_values=True
-                )
+                if hypo_maker.__class__.__name__ == "Detectors":
+                    update_param_values_detector(
+                        hypo_maker, best_fit_params.free, update_nominal_values=True
+                    )
+                else:
+                    update_param_values(
+                        hypo_maker, best_fit_params.free, update_nominal_values=True
+                    )
             best_fit_info = self.fit_recursively(
                 data_dist, hypo_maker, metric, external_priors_penalty,
                 fit_kwargs["method"], fit_kwargs["method_kwargs"],
@@ -1588,9 +1617,14 @@ def _fit_staged(self, data_dist, hypo_maker, metric,
         best_fit_info._rehash()
         # Make sure that the hypo_maker has its params also at the best fit point
         # with the original nominal parameter values.
-        update_param_values(
-            hypo_maker, best_fit_info.params.free, update_nominal_values=True
-        )
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(
+                hypo_maker, best_fit_info.params.free, update_nominal_values=True
+            )
+        else:
+            update_param_values(
+                hypo_maker, best_fit_info.params.free, update_nominal_values=True
+            )
         return best_fit_info

     def _fit_scipy(self, data_dist, hypo_maker, metric,
@@ -1918,6 +1952,8 @@ def annealing_callback(x, f, context):
         rescaled_pvals = optimize_result.pop('x')
         rescaled_pvals = np.where(flip_x0, 1 - rescaled_pvals, rescaled_pvals)
         hypo_maker._set_rescaled_free_params(rescaled_pvals)  # pylint: disable=protected-access
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # Get the best-fit metric value
         metric_val = sign * optimize_result.pop('fun')
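
Every minimizer backend gains the same two lines after `_set_rescaled_free_params`: writing rescaled values into the combined `Detectors` parameter set does not by itself forward the detector-suffixed values to the per-detector distribution makers, so the commit syncs eagerly here (and drops the older lazy sync in `_minimizer_callable`'s metric branch; see the deletion in the last hunk). A minimal mock of the staleness this avoids, using plain dicts rather than PISA classes (all names hypothetical):

class MockDetectors:
    """Combined view over per-detector parameter dicts."""
    def __init__(self, shared, per_detector):
        self.params = dict(shared)               # combined top-level view
        self.params.update(per_detector)         # e.g. {"dom_eff_IC86": 1.1}
        self.children = {"IC86": dict(shared)}   # per-detector views

    def sync(self):
        # counterpart of update_param_values_detector: strip the detector
        # suffix and forward each value to the matching child
        for name, value in self.params.items():
            for det, child in self.children.items():
                child[name.replace('_' + det, "")] = value

maker = MockDetectors({"aeff_scale": 1.0}, {"dom_eff_IC86": 1.1})
maker.params["aeff_scale"] = 1.2                    # what the minimizer step does
assert maker.children["IC86"]["aeff_scale"] == 1.0  # child is stale...
maker.sync()
assert maker.children["IC86"]["aeff_scale"] == 1.2  # ...until the explicit sync
assert maker.children["IC86"]["dom_eff"] == 1.1     # suffixed param forwarded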
@@ -1944,6 +1980,8 @@ def annealing_callback(x, f, context):
             # Reset to starting value of the fit, rather than nominal values because
             # the nominal value might be out of range if this is inside an octant check.
             hypo_maker._set_rescaled_free_params(x0)
+            if hypo_maker.__class__.__name__ == "Detectors":
+                update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # TODO: other metrics
         fit_info = HypoFitResult(
@@ -2115,6 +2153,8 @@ def loss_func(x):
         # values from [0,1] back to physical range)
         rescaled_pvals = np.array(m.values)
         hypo_maker._set_rescaled_free_params(rescaled_pvals)  # pylint: disable=protected-access
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # Get the best-fit metric value
         metric_val = sign * m.fval
@@ -2153,6 +2193,8 @@ def loss_func(x):
             # Reset to starting value of the fit, rather than nominal values because
             # the nominal value might be out of range if this is inside an octant check.
             hypo_maker._set_rescaled_free_params(x0)
+            if hypo_maker.__class__.__name__ == "Detectors":
+                update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # TODO: other metrics
         fit_info = HypoFitResult(
@@ -2286,6 +2328,8 @@ def loss_func(x, grad):
         # values from [0,1] back to physical range)
         rescaled_pvals = xopt
         hypo_maker._set_rescaled_free_params(rescaled_pvals)  # pylint: disable=protected-access
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # Get the best-fit metric value
         metric_val = sign * opt.last_optimum_value()
@@ -2322,6 +2366,8 @@ def loss_func(x, grad):

         if self.blindness > 1:  # only at stricter blindness level
             hypo_maker._set_rescaled_free_params(x0)
+            if hypo_maker.__class__.__name__ == "Detectors":
+                update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # TODO: other metrics
         fit_info = HypoFitResult(
@@ -2403,6 +2449,8 @@ def ineq_func(x, grad):
             if grad.size > 0:
                 raise RuntimeError("gradients not supported")
             hypo_maker._set_rescaled_free_params(x)
+            if hypo_maker.__class__.__name__ == "Detectors":
+                update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors
             # In NLOPT, the inequality function must stay negative, while in
             # scipy, the inequality function must stay positive. We keep with
             # the scipy convention by flipping the sign.
@@ -2518,6 +2566,8 @@ def _minimizer_callable(self, scaled_param_vals, hypo_maker, data_dist,
         scaled_param_vals = np.where(flip_x0, 1 - scaled_param_vals, scaled_param_vals)
         # Set param values from the scaled versions the minimizer works with
         hypo_maker._set_rescaled_free_params(scaled_param_vals)  # pylint: disable=protected-access
+        if hypo_maker.__class__.__name__ == "Detectors":
+            update_param_values_detector(hypo_maker, hypo_maker.params.free)  # updates values for ALL detectors

         # Get the map set
         try:
@@ -2548,7 +2598,6 @@ def _minimizer_callable(self, scaled_param_vals, hypo_maker, data_dist,
         #
         try:
             if hypo_maker.__class__.__name__ == "Detectors":
-                update_param_values_detector(hypo_maker, hypo_maker.params)  # updates values for ALL detectors
                 metric_val = 0
                 for i in range(len(hypo_maker.distribution_makers)):
                     data = data_dist[i].metric_total(expected_values=hypo_asimov_dist[i],