tests.workflow_anomaly_detection_test
import logging
import os
import shutil

import fiftyone as fo
import pytest
from fiftyone import ViewField as F
from fiftyone.utils.huggingface import load_from_hub

import config.config
from main import workflow_anomaly_detection
from utils.anomaly_detection_data_preparation import AnomalyDetectionDataPreparation
from utils.dataset_loader import _post_process_dataset
from utils.logging import configure_logging


@pytest.fixture(autouse=True)
def deactivate_hf_sync():
    """Disable uploads to the Hugging Face Hub for all tests in this module."""
    config.config.HF_DO_UPLOAD = False


@pytest.fixture(autouse=True)
def setup_logging():
    """Configure logging for all tests in this module."""
    configure_logging()


@pytest.fixture
def dataset_v51():
    """Fixture to load a FiftyOne dataset from the hub."""
    dataset_name_hub = "Voxel51/fisheye8k"
    dataset_name = "fisheye8k_v51_anomaly_test"
    try:
        dataset = load_from_hub(
            repo_id=dataset_name_hub, max_samples=30, name=dataset_name
        )
        dataset = _post_process_dataset(dataset)
        print(f"Loaded dataset {dataset_name} from hub: {dataset_name_hub}")
    except Exception:
        # Loading fails if the dataset already exists locally; reuse it instead
        dataset = fo.load_dataset(dataset_name)
        print(f"Dataset {dataset_name} was already loaded")
    assert dataset is not None, "Failed to load or create the FiftyOne dataset"

    return dataset


def test_anomaly_detection_train(dataset_v51):
    """Tests the training pipeline of the anomaly detection workflow with the Padim model on dataset_v51."""
    prep_config = {
        "location": "cam3",
        "rare_classes": ["Bus"],
    }

    data_preparer = AnomalyDetectionDataPreparation(
        dataset_v51, "fisheye8k_v51_anomaly_test", config=prep_config
    )
    run_config = {
        "model_name": "Padim",
        "image_size": [32, 32],
        "batch_size": 1,
        "epochs": 1,
        "early_stop_patience": 1,
        "data_root": data_preparer.export_root,
        "mode": ["train"],
    }

    eval_metrics = ["AUPR", "AUROC"]
    dataset_info = {"name": "fisheye8k_v51_anomaly_test"}

    # Clear the results field in case a previous run already filled it
    results_field = "pred_anomaly_score_Padim"
    try:
        data_preparer.dataset_ano_dec.delete_sample_field(results_field)
        print(f"Removed field {results_field} from dataset.")
    except Exception:
        pass  # Field does not exist yet

    workflow_anomaly_detection(
        dataset_v51,
        data_preparer.dataset_ano_dec,
        dataset_info,
        eval_metrics,
        run_config,
        wandb_activate=False,
    )

    # Select all samples that received an anomaly score
    # (the match passes for any non-negative score)
    print(
        f"Sample fields in dataset: {data_preparer.dataset_ano_dec.get_field_schema()}"
    )
    view_anomalies = data_preparer.dataset_ano_dec.match(F(results_field) >= 0)
    n_samples_selected = len(view_anomalies)
    print(f"{n_samples_selected} samples were scored by anomaly detection.")
    assert n_samples_selected != 0, "No samples were selected through anomaly detection"


@pytest.mark.parametrize("load_local", [True, False])
def test_anomaly_detection_inference(dataset_v51, load_local):
    """Tests the anomaly detection inference workflow by running the Padim model on the fisheye8k dataset and verifying that anomaly scores are generated."""

    if not load_local:
        # Delete local weights if they exist so they get downloaded from Hugging Face
        local_folder = "./output/models/anomalib/Padim/fisheye8k"
        if os.path.exists(local_folder):
            try:
                shutil.rmtree(local_folder)
                logging.warning(f"Deleted local weights folder: {local_folder}")
            except Exception as e:
                logging.error(f"Error deleting local weights folder: {e}")

    dataset_ano_dec = None
    data_root = None

    run_config = {
        "model_name": "Padim",
        "image_size": [32, 32],
        "batch_size": 1,
        "epochs": 1,
        "early_stop_patience": 1,
        "data_root": data_root,
        "mode": ["inference"],
    }

    eval_metrics = ["AUPR", "AUROC"]
    dataset_info = {"name": "fisheye8k"}

    # Clear the results field in case a previous run already filled it
    results_field = "pred_anomaly_score_Padim"
    try:
        dataset_v51.delete_sample_field(results_field)
        logging.warning(f"Removed field {results_field} from dataset.")
    except Exception:
        pass  # Field does not exist yet

    workflow_anomaly_detection(
        dataset_v51,
        dataset_ano_dec,
        dataset_info,
        eval_metrics,
        run_config,
        wandb_activate=False,
    )

    # Select all samples that received an anomaly score
    # (the match passes for any non-negative score)
    logging.info(f"Sample fields in dataset: {dataset_v51.get_field_schema()}")
    view_anomalies = dataset_v51.match(F(results_field) >= 0)
    n_samples_selected = len(view_anomalies)
    logging.info(f"{n_samples_selected} samples were scored by anomaly detection.")

    assert n_samples_selected != 0, "No samples were selected through anomaly detection"
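Both tests assert only that scores were written: F(results_field) >= 0 matches every scored sample rather than the anomalous ones. A minimal sketch of filtering actual anomalies with the same FiftyOne expression API, assuming the dataset the tests create and an illustrative 0.5 cutoff (the workflow defines no such threshold here):

# Sketch (not part of the test suite): threshold the
# "pred_anomaly_score_Padim" field written by the workflow.
# The 0.5 cutoff is an illustrative assumption, not a project default.
import fiftyone as fo
from fiftyone import ViewField as F

dataset = fo.load_dataset("fisheye8k_v51_anomaly_test")  # created by the tests
anomalous = dataset.match(F("pred_anomaly_score_Padim") >= 0.5)
print(f"{len(anomalous)} samples above the assumed 0.5 cutoff")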
@pytest.fixture(autouse=True)
def deactivate_hf_sync():
    Disables uploads to the Hugging Face Hub (sets config.config.HF_DO_UPLOAD = False) for all tests in this module.
@pytest.fixture(autouse=True)
def setup_logging():
    Configures logging via configure_logging() for all tests in this module.
@pytest.fixture
def dataset_v51():
    Fixture to load a FiftyOne dataset from the hub.
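The fixture's bare except treats any load failure as "dataset already exists". A sketch of the same load-or-reuse logic with an explicit existence check, assuming the same dataset names (the project-specific _post_process_dataset step is omitted):

# Sketch: load-or-reuse without a broad exception handler.
import fiftyone as fo
from fiftyone.utils.huggingface import load_from_hub

name = "fisheye8k_v51_anomaly_test"
if fo.dataset_exists(name):
    dataset = fo.load_dataset(name)  # reuse the cached dataset
else:
    dataset = load_from_hub("Voxel51/fisheye8k", max_samples=30, name=name)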
def test_anomaly_detection_train(dataset_v51):
    Tests the training pipeline of the anomaly detection workflow with the Padim model on dataset_v51.
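The run_config in this test is deliberately minimized (1 epoch, 32x32 images, batch size 1) so it finishes quickly. For reference, a sketch of a more realistic training configuration using the same keys; all values here are illustrative assumptions, not project defaults:

# Illustrative training configuration; keys mirror the test, values are assumed.
run_config = {
    "model_name": "Padim",
    "image_size": [256, 256],  # vs. the test's 32x32
    "batch_size": 16,
    "epochs": 20,
    "early_stop_patience": 5,
    "data_root": None,  # set to data_preparer.export_root in practice
    "mode": ["train"],
}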
@pytest.mark.parametrize("load_local", [True, False])
def test_anomaly_detection_inference(dataset_v51, load_local):
    Tests the anomaly detection inference workflow by running the Padim model on the fisheye8k dataset and verifying that anomaly scores are generated. Parametrized over load_local to cover both locally cached weights and weights downloaded from Hugging Face.
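A sketch of running this module programmatically with pytest, assuming the file lives at tests/workflow_anomaly_detection_test.py as the module name suggests (the path may differ per checkout):

# Run just the training test; drop the "::..." node id to run the whole module.
import pytest

exit_code = pytest.main(
    ["-q", "tests/workflow_anomaly_detection_test.py::test_anomaly_detection_train"]
)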