cleanup add training bell
backend/services/generate_json_yolox.py | 85 | Normal file → Executable file
@@ -7,12 +7,30 @@ from models.Images import Image
 from models.Annotation import Annotation
 
 def generate_training_json(training_id):
-    """Generate COCO JSON for training, validation, and test sets"""
-    # training_id is now project_details_id
-    training_project_details = TrainingProjectDetails.query.get(training_id)
+    """Generate COCO JSON for training, validation, and test sets
+
+    Args:
+        training_id: Can be either a Training.id or TrainingProjectDetails.id
+        Function will automatically detect which one and find the correct details_id
+    """
+    from models.training import Training
+
+    # First, try to get as a Training record
+    training_record = Training.query.get(training_id)
+
+    if training_record:
+        # It's a Training.id - use its project_details_id
+        details_id = training_record.project_details_id
+        print(f'[generate_training_json] Using training_id={training_id}, mapped to project_details_id={details_id}')
+    else:
+        # Try as TrainingProjectDetails.id directly
+        details_id = training_id
+        print(f'[generate_training_json] Using training_id={training_id} as project_details_id directly')
+
+    training_project_details = TrainingProjectDetails.query.get(details_id)
+
     if not training_project_details:
-        raise Exception(f'No TrainingProjectDetails found for project_details_id {training_id}')
+        raise Exception(f'No TrainingProjectDetails found for id {training_id} (details_id: {details_id})')
 
     details_obj = training_project_details.to_dict()
 
@@ -110,22 +128,35 @@ def generate_training_json(training_id):
             break
 
         # Construct ABSOLUTE path using data_dir
-        # Normalize data_dir - ensure it uses backslashes for Windows
-        normalized_data_dir = data_dir.rstrip('/\\').replace('/', '\\')
+        # Detect platform for proper path handling
+        import platform
+        is_windows = platform.system() == 'Windows'
+
+        # Normalize data_dir and file_name based on platform
+        if is_windows:
+            # Windows: use backslashes
+            normalized_data_dir = data_dir.rstrip('/\\').replace('/', '\\')
+            file_name = file_name.replace('/', '\\')
+        else:
+            # Linux/Mac: use forward slashes
+            normalized_data_dir = data_dir.rstrip('/\\').replace('\\', '/')
+            file_name = file_name.replace('\\', '/')
 
         # Check if already absolute path
-        if not (file_name.startswith('\\\\') or (len(file_name) > 1 and file_name[1] == ':')):
-            # It's a relative path, combine with data_dir
-            # For UNC paths, we need to manually concatenate to preserve \\
-            if normalized_data_dir.startswith('\\\\'):
-                # UNC path
-                file_name = normalized_data_dir + '\\' + file_name.replace('/', '\\')
-            else:
-                # Regular path
-                file_name = os.path.join(normalized_data_dir, file_name.replace('/', '\\'))
-        else:
-            # Already absolute, just normalize separators
-            file_name = file_name.replace('/', '\\')
+        is_absolute = False
+        if is_windows:
+            is_absolute = file_name.startswith('\\\\') or (len(file_name) > 1 and file_name[1] == ':')
+        else:
+            is_absolute = file_name.startswith('/')
+
+        if not is_absolute:
+            # It's a relative path, combine with data_dir
+            if is_windows and normalized_data_dir.startswith('\\\\'):
+                # Windows UNC path
+                file_name = normalized_data_dir + '\\' + file_name
+            else:
+                # Regular path (Windows or Linux)
+                file_name = os.path.join(normalized_data_dir, file_name)
 
         # Get annotations for this image
         annotations = Annotation.query.filter_by(image_id=image.image_id).all()
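
Note: the platform-aware path handling in the hunk above can be read as a small standalone helper. The sketch below is illustrative only (the helper name and sample values are not part of this commit) and assumes data_dir and file_name are plain strings on either a Windows or POSIX host.

import os
import platform

def build_absolute_image_path(data_dir, file_name):
    """Join file_name onto data_dir using the host platform's path separators."""
    is_windows = platform.system() == 'Windows'
    if is_windows:
        # Windows: normalize everything to backslashes
        data_dir = data_dir.rstrip('/\\').replace('/', '\\')
        file_name = file_name.replace('/', '\\')
        is_absolute = file_name.startswith('\\\\') or (len(file_name) > 1 and file_name[1] == ':')
    else:
        # Linux/Mac: normalize everything to forward slashes
        data_dir = data_dir.rstrip('/\\').replace('\\', '/')
        file_name = file_name.replace('\\', '/')
        is_absolute = file_name.startswith('/')
    if is_absolute:
        return file_name
    if is_windows and data_dir.startswith('\\\\'):
        # UNC share: concatenate manually so the leading \\ is preserved
        return data_dir + '\\' + file_name
    return os.path.join(data_dir, file_name)

# Example on Linux (illustrative paths):
# build_absolute_image_path('/mnt/data', 'images\\img_001.jpg') -> '/mnt/data/images/img_001.jpg'
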
@@ -218,13 +249,19 @@ def generate_training_json(training_id):
 
     project_name = training_project.title.replace(' ', '_') if training_project and training_project.title else f'project_{details_obj["project_id"]}'
 
-    # Get training record to use its name for folder
-    training_record = Training.query.filter_by(project_details_id=training_id).first()
-    training_folder_name = f"{training_record.exp_name or training_record.training_name or 'training'}_{training_record.id}" if training_record else str(training_id)
-    training_folder_name = training_folder_name.replace(' ', '_')
+    # Get training record to use its name and ID for folder and file names
+    # Use the same training_id that was passed in (if it was a Training.id)
+    # or find the first training for this details_id
+    if not training_record:
+        training_record = Training.query.filter_by(project_details_id=details_id).first()
 
     # Use training_record.id for file names to match what generate_yolox_exp expects
-    training_file_id = training_record.id if training_record else training_id
+    if training_record:
+        training_folder_name = f"{training_record.exp_name or training_record.training_name or 'training'}_{training_record.id}"
+        training_folder_name = training_folder_name.replace(' ', '_')
+        training_file_id = training_record.id
+    else:
+        training_folder_name = str(details_id)
+        training_file_id = details_id
 
     # Save annotations to the configured output folder
     annotations_dir = os.path.join(output_base_path, project_name, training_folder_name, 'annotations')
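
For reference, a worked example of the folder naming above, using illustrative values rather than data from this commit: a training record with exp_name 'yolox s run' and id 12 produces the folder name 'yolox_s_run_12'.

# Illustrative values only
exp_name, training_name, record_id = 'yolox s run', None, 12
training_folder_name = f"{exp_name or training_name or 'training'}_{record_id}"
training_folder_name = training_folder_name.replace(' ', '_')
print(training_folder_name)  # -> yolox_s_run_12
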
@@ -242,7 +279,7 @@ def generate_training_json(training_id):
     with open(test_path, 'w') as f:
        json.dump(test_json, f, indent=2)
 
-    print(f'COCO JSON splits written to {annotations_dir} for trainingId {training_id}')
+    print(f'COCO JSON splits written to {annotations_dir} for training_id={training_file_id} (details_id={details_id})')
 
     # Also generate inference exp.py
     from services.generate_yolox_exp import generate_yolox_inference_exp
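
With the lookup change in the first hunk, generate_training_json accepts either kind of id. A minimal usage sketch, assuming the database/application context is already set up and that the ids below are placeholders:

from services.generate_json_yolox import generate_training_json

# Passing a Training.id: it is mapped to its project_details_id internally
generate_training_json(42)

# Passing a TrainingProjectDetails.id also works: when no Training row matches,
# the value is treated as the details_id directly
generate_training_json(7)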