@@ -375,25 +375,25 @@ def readstr(self, datastring):
         baselinefunctions = header[res.end() :].strip()
         header = header[: res.start()]

-        ### Instantiating baseline functions
+        # Instantiating baseline functions
         res = re.split(r"(?m)^#+ BaselineFunction \d+\s*(?:#.*\s+)*", baselinefunctions)
         for s in res[1:]:
             safebf.append(BaseFunction.factory(s, safebf))

-        ### Instantiating peak functions
+        # Instantiating peak functions
         res = re.split(r"(?m)^#+ PeakFunction \d+\s*(?:#.*\s+)*", peakfunctions)
         for s in res[1:]:
             safepf.append(BaseFunction.factory(s, safepf))

-        ### Instantiating Baseline object
+        # Instantiating Baseline object
         if re.match(r"^None$", baselineobject):
             self.baseline = None
         elif re.match(r"^\d+$", baselineobject):
             self.baseline = safebf[int(baselineobject)]
         else:
             self.baseline = Baseline.factory(baselineobject, safebf)

-        ### Instantiating initial peaks
+        # Instantiating initial peaks
         if re.match(r"^None$", initial_peaks):
             self.initial_peaks = None
         else:
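The `re.split` calls touched in this hunk carve the serialized string into one block per saved function, which `BaseFunction.factory` then rebuilds. A minimal sketch of that splitting behavior, using an invented section body rather than the exact srmise file layout:

```python
import re

# Invented serialized text; only the "BaselineFunction N" labels mirror the
# pattern used above, and the body lines are hypothetical.
text = (
    "leading header text\n"
    "#### BaselineFunction 0\n"
    "function=Polynomial(degree=1)\n"
    "#### BaselineFunction 1\n"
    "function=Polynomial(degree=2)\n"
)

blocks = re.split(r"(?m)^#+ BaselineFunction \d+\s*(?:#.*\s+)*", text)
# blocks[0] is everything before the first label; blocks[1:] are the
# per-function sections that readstr() hands to BaseFunction.factory().
print(blocks[1:])
# ['function=Polynomial(degree=1)\n', 'function=Polynomial(degree=2)\n']
```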
@@ -402,7 +402,7 @@ def readstr(self, datastring):
             for s in res[1:]:
                 self.initial_peaks.append(Peak.factory(s, safepf))

-        ### Instantiating srmise metatdata
+        # Instantiating srmise metatdata

         # pf
         res = re.search(r"^pf=(.*)$", srmisemetadata, re.M)
@@ -426,10 +426,10 @@ def readstr(self, datastring):
         res = re.search(r"^Range=(.*)$", srmisemetadata, re.M)
         self.rng = eval(res.groups()[0].strip())

-        ### Instantiating other metadata
+        # Instantiating other metadata
         self.readmetadata(metadata)

-        ### Instantiating start data
+        # Instantiating start data
         # read actual data - x, y, dx, dy, plus effective_dy
         arrays = []
         if hasx:
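The metadata fields restored in this hunk are stored as `key=value` lines and recovered with a multiline `re.search` followed by `eval`. A small sketch of that round trip, using an invented metadata string rather than the exact srmise header:

```python
import re

# Hypothetical metadata block; only the Range= line matters for this sketch.
srmisemetadata = "cres=0.05\nRange=[1.5, 10.0]\n"

res = re.search(r"^Range=(.*)$", srmisemetadata, re.M)
rng = eval(res.groups()[0].strip())
print(rng)  # [1.5, 10.0]
```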
@@ -478,7 +478,7 @@ def readstr(self, datastring):
         if hasedy:
             self.effective_dy = np.array(self.effective_dy)

-        ### Instantiating results
+        # Instantiating results
         res = re.search(r"^#+ ModelCluster\s*(?:#.*\s+)*", results, re.M)
         if res:
             mc = results[res.end() :].strip()
@@ -638,7 +638,7 @@ def writestr(self):
                 line.append("%g" % self.effective_dy[i])
             lines.append(" ".join(line))

-        ### Calculated members
+        # Calculated members
         lines.append("##### Results")
         lines.append("extraction_type=%s" % repr(self.extraction_type))

@@ -792,8 +792,8 @@ def extract_single(self, recursion_depth=1):

         stepcounter = 0

-        ### #########################
-        ### Main extraction loop ###
+        # #########################
+        # Main extraction loop ###
         for step in dclusters:

             stepcounter += 1
@@ -839,7 +839,7 @@ def extract_single(self, recursion_depth=1):
             # three clusters can become adjacent at any given step.
             assert len(adjacent) <= 3

-            ### Update cluster fits ###
+            # Update cluster fits ###
             # 1. Refit clusters adjacent to at least one other cluster.
             for a in adjacent:
                 mclusters[a].fit(justify=True)
@@ -922,7 +922,7 @@ def extract_single(self, recursion_depth=1):
                 near_peaks = Peaks([full_cluster.model[i] for i in near_peaks])
                 other_peaks = Peaks([full_cluster.model[i] for i in other_peaks])

-                ### Remove contribution of peaks outside neighborhood
+                # Remove contribution of peaks outside neighborhood
                 # Define range of fitting/recursion to the interpeak range
                 # The adjusted error is passed unchanged. This may introduce
                 # a few more peaks than is justified, but they can be pruned
@@ -985,7 +985,7 @@ def extract_single(self, recursion_depth=1):
                     # Incorporate best peaks from recursive search.
                     adj_cluster.augment(rec)

-                ### Select which model to use
+                # Select which model to use
                 full_cluster.model = other_peaks
                 full_cluster.replacepeaks(adj_cluster.model)
                 full_cluster.fit(True)
@@ -1001,9 +1001,9 @@ def extract_single(self, recursion_depth=1):
                     logger.debug("\n".join(msg), mclusters[step.lastcluster_idx], full_cluster)

                 mclusters[step.lastcluster_idx] = full_cluster
-                ### End update cluster fits ###
+                # End update cluster fits ###

-            ### Combine adjacent clusters ###
+            # Combine adjacent clusters ###

             # Iterate in reverse order to preserve earlier indices
             for idx in adjacent[-1:0:-1]:
@@ -1065,7 +1065,7 @@ def extract_single(self, recursion_depth=1):
                 near_peaks = Peaks([new_cluster.model[i] for i in near_peaks])
                 other_peaks = Peaks([new_cluster.model[i] for i in other_peaks])

-                ### Remove contribution of peaks outside neighborhood
+                # Remove contribution of peaks outside neighborhood
                 # Define range of fitting/recursion to the interpeak range
                 # The adjusted error is passed unchanged. This may introduce
                 # a few more peaks than is justified, but they can be pruned
@@ -1075,7 +1075,7 @@ def extract_single(self, recursion_depth=1):
                 adj_y = y[adj_slice] - other_peaks.value(adj_x)
                 adj_error = dy[adj_slice]

-                ### # Perform recursion on a version that is scaled at the
+                # # Perform recursion on a version that is scaled at the
                 # border, as well as on that is simply fit beforehand. In
                 # many cases these lead to nearly identical results, but
                 # occasionally one works much better than the other.
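The subtraction in this hunk removes the modeled contribution of peaks outside the current neighborhood before refitting the local slice. A rough numpy sketch of the same idea, with a made-up Gaussian standing in for the `Peaks.value()` call:

```python
import numpy as np


def gaussian(x, center, width, height):
    # Stand-in peak shape for illustration; not the srmise peak functions.
    return height * np.exp(-((x - center) ** 2) / (2 * width**2))


x = np.linspace(0, 10, 201)
y = gaussian(x, 3.0, 0.3, 5.0) + gaussian(x, 7.0, 0.4, 2.0)

# Hypothetical neighborhood around the first peak: subtract the modeled value
# of the out-of-neighborhood peak, then refit only what remains on this slice.
adj_slice = slice(40, 120)
adj_x = x[adj_slice]
adj_y = y[adj_slice] - gaussian(adj_x, 7.0, 0.4, 2.0)
```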
@@ -1194,7 +1194,7 @@ def extract_single(self, recursion_depth=1):
                     # Incorporate best peaks from recursive search.
                     adj_cluster2.augment(rec2)

-                ### Select which model to use
+                # Select which model to use
                 new_cluster.model = other_peaks
                 rej_cluster = ModelCluster(new_cluster)
                 q1 = adj_cluster1.quality(self.error_method)
@@ -1224,16 +1224,16 @@ def extract_single(self, recursion_depth=1):
                 mclusters[idx - 1] = new_cluster
                 del mclusters[idx]

-            ### End combine adjacent clusters loop ###
+            # End combine adjacent clusters loop ###

             # Finally, combine clusters in dclusters
             if len(adjacent) > 0:
                 step.combine_clusters([adjacent])

             tracer.emit(*mclusters)

-        ### End main extraction loop ###
-        ### #############################
+        # End main extraction loop ###
+        # #############################

         # Put initial peaks back in
         mclusters[0].addexternalpeaks(ip)