Skip to content

Commit 2ec1a5a

Browse files
raj-sinha (The spade_anomaly_detection Authors)
authored and
The spade_anomaly_detection Authors
committed
Internal update.
PiperOrigin-RevId: 703546573
1 parent e3ec5e3 commit 2ec1a5a

File tree

1 file changed

+5
-2
lines changed

1 file changed

+5
-2
lines changed

spade_anomaly_detection/runner.py

Lines changed: 5 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -36,7 +36,6 @@
3636
"""
3737

3838
import enum
39-
# TODO(b/247116870): Change to collections when Vertex supports python 3.9
4039
from typing import Mapping, Optional, Tuple, cast
4140

4241
from absl import logging
@@ -49,6 +48,8 @@
4948
from spade_anomaly_detection import supervised_model
5049
import tensorflow as tf
5150

51+
# TODO(b/247116870): Change to collections when Vertex supports python 3.9
52+
5253

5354
@enum.unique
5455
class DataFormat(enum.Enum):
@@ -135,6 +136,7 @@ def __init__(self, runner_parameters: parameters.RunnerParameters):
135136
else:
136137
self.supervised_model_object = None
137138

139+
# If the thresholds are not set, use the thresholds from the input table.
138140
if (
139141
self.runner_parameters.positive_threshold is None
140142
or self.runner_parameters.negative_threshold is None
@@ -760,7 +762,7 @@ def run(self) -> None:
760762
batch_size=1,
761763
)
762764
train_label_counts = self.input_data_loader.label_counts
763-
# TODO(sinharaj): This is not ideal, we should not need to read the files
765+
# This is not ideal, we should not need to read the files
764766
# again. Find a way to get the label counts without reading the files.
765767
# Assumes that data loader has already been used to read the input table.
766768
total_record_count = sum(train_label_counts.values())
@@ -885,6 +887,7 @@ def run(self) -> None:
885887
labels=updated_labels,
886888
weights=weights,
887889
)
890+
# End of pseudolabeling and supervised model training loop.
888891

889892
if not self.runner_parameters.upload_only:
890893
self.evaluate_model()

0 commit comments

Comments (0)