- import fiftyone as fo
- import fiftyone.brain as fob
- import fiftyone.core.fields as fof
- import fiftyone.core.labels as fol
- import fiftyone.core.patches as fop
import fiftyone.operators as foo
import fiftyone.operators.types as types
- import fiftyone.zoo.models as fozm
import numpy as np
from fiftyone import ViewField as F
- from fiftyone.brain import Similarity


class EvaluationPanel(foo.Panel):
@@ -367,7 +360,7 @@ def _update_table_data(self, ctx):
            new_row = {"class": "All", "AP": int(results.mAP() * 1000) / 1000}
            mAP_list.append(new_row)
            ctx.panel.set_data("my_stack.mAP_evaluations", mAP_list)
-
+
        # Compare key DOES exist, update c_(table_name) instead
        else:
            c_eval = ctx.dataset.get_evaluation_info(compare_key).serialize()
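Worth noting about the context line kept in this hunk: int(results.mAP() * 1000) / 1000 truncates mAP to three decimal places rather than rounding it. A quick illustration with a made-up value (not taken from the plugin):

    val = 0.45678
    int(val * 1000) / 1000   # 0.456 -- truncates, as in the hunk above
    round(val, 3)            # 0.457 -- rounding, shown for comparison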
@@ -512,9 +505,9 @@ def _update_plot_data(
        ctx,
    ):
        # _update_plot_data is called in on_change_config
-        # The function updates the DATA of all the plots in the panel,
+        # The function updates the DATA of all the plots in the panel,
        # including histograms and confusion matrices# _update_plot_data is called in on_change_config
-        # The function updates the DATA of all the plots in the panel,
+        # The function updates the DATA of all the plots in the panel,
        # including histograms and confusion matrices

        # Grab the basic info
@@ -629,25 +622,24 @@ def _update_plot_data(
            ],
        )

-        #Calculate recall, precision, and f1. Dont forget to check for divide by 0!
+        # Calculate recall, precision, and f1. Dont forget to check for divide by 0!
        tp = np.array(ctx.dataset.values(f"{eval_key}_tp"))
        fp = np.array(ctx.dataset.values(f"{eval_key}_fp"))
        fn = np.array(ctx.dataset.values(f"{eval_key}_fn"))

        n = tp.astype(np.float64)
-        d = (tp + fp).astype(np.float64)
-        p = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+        d = (tp + fp).astype(np.float64)
+        p = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
        p = np.nan_to_num(p, nan=0.0)

-
        n = tp.astype(np.float64)
-        d = (tp + fn).astype(np.float64)
-        r = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+        d = (tp + fn).astype(np.float64)
+        r = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
        r = np.nan_to_num(r, nan=0.0)

        n = (2 * (p * r)).astype(np.float64)
-        d = (p + r).astype(np.float64)
-        f1 = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+        d = (p + r).astype(np.float64)
+        f1 = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
        f1 = np.nan_to_num(f1, nan=0.0)

        p_left_edges, p_counts, p_widths = compute_histogram(p, 10)
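Both this hunk and the compare-key hunk later in the function repeat the same guarded-division idiom: divide into a NaN-filled output only where the denominator is nonzero, then map the remaining NaNs to 0.0. A minimal standalone sketch of that idiom (safe_divide and the sample TP/FP/FN values are illustrative, not part of the plugin):

    import numpy as np

    def safe_divide(n, d):
        # Elementwise n / d that yields 0.0 wherever d == 0, mirroring the
        # np.divide(..., out=np.full_like(...), where=d != 0) pattern above
        n = np.asarray(n, dtype=np.float64)
        d = np.asarray(d, dtype=np.float64)
        out = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
        return np.nan_to_num(out, nan=0.0)

    # Illustrative per-sample TP/FP/FN counts
    tp = np.array([5, 0, 3])
    fp = np.array([1, 0, 2])
    fn = np.array([0, 4, 1])

    p = safe_divide(tp, tp + fp)        # precision, 0.0 where tp + fp == 0
    r = safe_divide(tp, tp + fn)        # recall, 0.0 where tp + fn == 0
    f1 = safe_divide(2 * p * r, p + r)  # F1, 0.0 where p + r == 0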
@@ -736,7 +728,6 @@ def _update_plot_data(

            conf = sum(conf_total) / len(conf_total)

-
            if tp + fp != 0:
                p = tp / (tp + fp)
                p = np.nan_to_num(p, nan=0.0)
@@ -899,7 +890,7 @@ def _update_plot_data(
            else:
                r = 0
            r = np.nan_to_num(r, nan=0.0)
-            if p + r != 0:
+            if p + r != 0:
                f1 = 2 * (p * r) / (p + r)
            else:
                f1 = 0
@@ -1093,35 +1084,33 @@ def _update_plot_data(
            c_fn = np.array(ctx.dataset.values(f"{compare_key}_fn"))

            n = tp.astype(np.float64)
-            d = (tp + fp).astype(np.float64)
-            p = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+            d = (tp + fp).astype(np.float64)
+            p = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
            p = np.nan_to_num(p, nan=0.0)

-
            n = tp.astype(np.float64)
-            d = (tp + fn).astype(np.float64)
-            r = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+            d = (tp + fn).astype(np.float64)
+            r = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
            r = np.nan_to_num(r, nan=0.0)

            n = (2 * (p * r)).astype(np.float64)
-            d = (p + r).astype(np.float64)
-            f1 = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+            d = (p + r).astype(np.float64)
+            f1 = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
            f1 = np.nan_to_num(f1, nan=0.0)

            n = c_tp.astype(np.float64)
-            d = (c_tp + c_fp).astype(np.float64)
-            c_p = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+            d = (c_tp + c_fp).astype(np.float64)
+            c_p = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
            c_p = np.nan_to_num(c_p, nan=0.0)

-
            n = c_tp.astype(np.float64)
-            d = (c_tp + c_fn).astype(np.float64)
-            c_r = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+            d = (c_tp + c_fn).astype(np.float64)
+            c_r = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
            c_r = np.nan_to_num(r, nan=0.0)

            n = (2 * (c_p * c_r)).astype(np.float64)
-            d = (c_p + c_r).astype(np.float64)
-            c_f1 = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
+            d = (c_p + c_r).astype(np.float64)
+            c_f1 = np.divide(n, d, out=np.full_like(n, np.nan), where=d != 0)
            c_f1 = np.nan_to_num(f1, nan=0.0)

            p_left_edges, p_counts, p_widths = compute_histogram(p, 10)
@@ -1242,8 +1231,6 @@ def _update_plot_data(

                conf = sum(conf_total) / len(conf_total)

-
-
                if tp + fp != 0:
                    p = tp / (tp + fp)
                    p = np.nan_to_num(p, nan=0.0)
@@ -1316,7 +1303,6 @@ def _update_plot_data(
                else:
                    c_f1 = 0

-
                c_p_class_list.append(c_p)
                c_r_class_list.append(c_r)
                c_f1_class_list.append(c_f1)
@@ -1538,7 +1524,7 @@ def _update_plot_data(
            else:
                r = 0
            r = np.nan_to_num(r, nan=0.0)
-            if p + r != 0:
+            if p + r != 0:
                f1 = 2 * (p * r) / (p + r)
            else:
                f1 = 0
@@ -1720,7 +1706,6 @@ def _add_plots(self, ctx, stack):
        # Start the ones that always appear regardless of model type and follow by eval_type
        # specific ones afterwards

-
        # Start by grabbing some basics
        eval_key = ctx.panel.get_state("my_stack.menu.actions.eval_key")
        compare_key = ctx.panel.get_state("my_stack.menu.actions.compare_key", None)
@@ -1744,7 +1729,7 @@ def _add_plots(self, ctx, stack):

        # After the plot layout/config is defined, add the property to the stack with the
        # appropriate on_call and on_selected calls
-        #TODO add on_selected
+        # TODO add on_selected
        stack.add_property(
            "confidence",
            types.Property(
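compute_histogram(p, 10) is called in both plot-data paths above, but its definition is not part of this diff. Judging from how its three return values are consumed, a plausible sketch (an assumption, not the plugin's actual helper) is a thin wrapper around np.histogram:

    import numpy as np

    def compute_histogram(values, num_bins):
        # Hypothetical reconstruction: bin the values and return the pieces a
        # bar-style histogram plot needs (left bin edges, counts, bin widths)
        values = np.asarray(values, dtype=np.float64)
        counts, edges = np.histogram(values, bins=num_bins)
        return edges[:-1], counts, np.diff(edges)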