implemented locking in training setup
backend/data/test_base_configs.py (79 lines, Normal file)
@@ -0,0 +1,79 @@
#!/usr/bin/env python3
"""
Test script to demonstrate base configuration loading for YOLOX models
"""

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from services.generate_yolox_exp import load_base_config

def test_base_configs():
    """Test loading all base configurations"""
    models = ['yolox-s', 'yolox-m', 'yolox-l', 'yolox-x']

    print("=" * 80)
    print("YOLOX Base Configuration Test")
    print("=" * 80)

    for model in models:
        print(f"\n{'='*80}")
        print(f"Model: {model.upper()}")
        print(f"{'='*80}")

        try:
            config = load_base_config(model)

            # Group parameters by category
            arch_params = ['depth', 'width', 'activation']
            training_params = ['max_epoch', 'warmup_epochs', 'basic_lr_per_img', 'scheduler',
                               'no_aug_epochs', 'min_lr_ratio']
            optimizer_params = ['momentum', 'weight_decay']
            augmentation_params = ['mosaic_prob', 'mixup_prob', 'hsv_prob', 'flip_prob',
                                   'degrees', 'translate', 'shear', 'mosaic_scale',
                                   'mixup_scale', 'enable_mixup']
            input_params = ['input_size', 'test_size', 'random_size']
            eval_params = ['eval_interval', 'print_interval']

            print("\n[Architecture]")
            for param in arch_params:
                if param in config:
                    print(f" {param:25s} = {config[param]}")

            print("\n[Training Hyperparameters]")
            for param in training_params:
                if param in config:
                    print(f" {param:25s} = {config[param]}")

            print("\n[Optimizer]")
            for param in optimizer_params:
                if param in config:
                    print(f" {param:25s} = {config[param]}")

            print("\n[Data Augmentation]")
            for param in augmentation_params:
                if param in config:
                    print(f" {param:25s} = {config[param]}")

            print("\n[Input/Output]")
            for param in input_params:
                if param in config:
                    print(f" {param:25s} = {config[param]}")

            print("\n[Evaluation]")
            for param in eval_params:
                if param in config:
                    print(f" {param:25s} = {config[param]}")

            print(f"\n✓ Successfully loaded {len(config)} parameters")

        except Exception as e:
            print(f"✗ Error loading config: {e}")

    print("\n" + "="*80)
    print("Test Complete")
    print("="*80)

if __name__ == '__main__':
    test_base_configs()
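
Usage note (not part of the diff): the script can be run directly with `python backend/data/test_base_configs.py`, since it adds its parent directory to `sys.path` before importing. The sketch below illustrates how the loader under test is assumed to behave, based only on how this script uses it (a dict of hyperparameters keyed by name); it is not taken from the commit itself.

# Minimal sketch, assuming load_base_config returns a flat dict of hyperparameters
from services.generate_yolox_exp import load_base_config

config = load_base_config('yolox-s')             # one of: yolox-s, yolox-m, yolox-l, yolox-x
print(sorted(config.keys()))                     # which parameters the base config defines
print(config.get('depth'), config.get('width'))  # architecture scaling factors, if present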