initial push
This commit is contained in:
92
backend/services/fetch-labelstudio.js
Normal file
92
backend/services/fetch-labelstudio.js
Normal file
@@ -0,0 +1,92 @@
|
||||
// Label Studio connection settings.
// SECURITY: the base URL and API token were previously hard-coded here.
// Prefer environment variables so credentials are not committed to source
// control; the original literals are kept as fallbacks so existing
// deployments keep working unchanged.
const API_URL = process.env.LABEL_STUDIO_URL || 'http://192.168.1.19:8080/api';
const API_TOKEN = process.env.LABEL_STUDIO_TOKEN || 'c1cef980b7c73004f4ee880a42839313b863869f';

const fetch = require('node-fetch');
|
||||
|
||||
/**
 * Fetch the full JSON_MIN export of a Label Studio project.
 *
 * Triggers the export endpoint; when Label Studio answers with the task
 * array directly (synchronous export) it is returned as-is. Otherwise the
 * endpoint is polled (up to 20 tries, 2 s apart) until a download URL
 * appears, and the export file is then downloaded and parsed.
 *
 * NOTE: the function name keeps its original spelling ("Lable") because
 * other modules import it by that name.
 *
 * @param {number|string} projectid - Label Studio project id.
 * @returns {Promise<Array>} parsed JSON_MIN annotation records.
 * @throws {Error} when any HTTP step fails or the export never becomes ready.
 */
async function fetchLableStudioProject(projectid) {
    const exportUrl = `${API_URL}/projects/${projectid}/export?exportType=JSON_MIN`;
    const headers = { Authorization: `Token ${API_TOKEN}` };

    // Shared response check: log the response body (best effort — the body
    // read itself may fail) and throw a uniform error. Replaces three
    // previously identical inline stanzas; messages are unchanged.
    async function assertOk(res, action) {
        if (!res.ok) {
            const errorText = await res.text().catch(() => '');
            console.error(`${action}: ${res.status} ${res.statusText} - ${errorText}`);
            throw new Error(`${action}: ${res.status} ${res.statusText}`);
        }
    }

    // 1. Trigger export
    let res = await fetch(exportUrl, { headers });
    await assertOk(res, 'Failed to trigger export');
    let data = await res.json();

    // If data is an array, the export completed synchronously.
    if (Array.isArray(data)) return data;

    // Otherwise poll the same endpoint until a file URL shows up.
    let fileUrl = data.download_url || data.url || null;
    let tries = 0;
    while (!fileUrl && tries < 20) {
        await new Promise(r => setTimeout(r, 2000));
        res = await fetch(exportUrl, { headers });
        await assertOk(res, 'Failed to poll export');
        data = await res.json();
        fileUrl = data.download_url || data.url || null;
        tries++;
    }
    if (!fileUrl) throw new Error('Label Studio export did not become ready');

    // 2. Download the export file; the API may return a relative URL, in
    // which case it is resolved against the server root (API_URL minus /api).
    res = await fetch(fileUrl.startsWith('http') ? fileUrl : `${API_URL.replace('/api','')}${fileUrl}`, { headers });
    await assertOk(res, 'Failed to download export');
    return await res.json();
}
|
||||
|
||||
|
||||
|
||||
/**
 * List all Label Studio projects as { id, title } pairs.
 *
 * Deliberately best-effort: any failure (network, HTTP, bad payload) is
 * logged and an empty array is returned, so callers can iterate the result
 * without their own error handling.
 *
 * @returns {Promise<Array<{id: number, title: string}>>} projects, or [] on failure.
 */
async function fetchProjectIdsAndTitles() {
    try {
        const response = await fetch(`${API_URL}/projects/`, {
            headers: {
                'Authorization': `Token ${API_TOKEN}`,
                'Content-Type': 'application/json'
            }
        });

        if (!response.ok) {
            // Body read is best-effort; it may itself fail.
            let errorText = await response.text().catch(() => '');
            console.error(`Failed to fetch projects: ${response.status} ${response.statusText} - ${errorText}`);
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const data = await response.json();

        // The projects endpoint returns a paginated envelope; we only
        // consume the `results` array.
        if (!data.results || !Array.isArray(data.results)) {
            throw new Error('API response does not contain results array');
        }

        // Keep only the fields the rest of the app needs.
        // (A leftover debug console.log of the full list was removed here.)
        return data.results.map(project => ({
            id: project.id,
            title: project.title
        }));
    } catch (error) {
        console.error('Failed to fetch projects:', error);
        return [];
    }
}
|
||||
|
||||
// Public API of this module (consumed by seed-label-studio.js).
module.exports = { fetchLableStudioProject, fetchProjectIdsAndTitles };

// Manual smoke tests — uncomment to run this file directly:
//fetchLableStudioProject(20)
//fetchProjectIdsAndTitles()
|
||||
176
backend/services/generate-json-yolox.js
Normal file
176
backend/services/generate-json-yolox.js
Normal file
@@ -0,0 +1,176 @@
|
||||
const TrainingProject = require('../models/TrainingProject.js');
|
||||
const TrainingProjectDetails = require('../models/TrainingProjectDetails.js')
|
||||
const LabelStudioProject = require('../models/LabelStudioProject.js')
|
||||
const Annotation = require('../models/Annotation.js')
|
||||
const Images = require('../models/Images.js')
|
||||
const fs = require('fs');
|
||||
|
||||
|
||||
/**
 * Build COCO-format train/valid/test annotation JSON files for one training
 * configuration and write them to the shared annotations directory.
 *
 * @param {number|string} trainingId - primary key of TrainingProjectDetails
 *   (the parameter name predates the schema change; see first comment).
 * @throws {Error} when no TrainingProjectDetails row exists for trainingId.
 *
 * NOTE(review): the inference exp.py generation at the bottom is
 * fire-and-forget (the promise is not awaited), so this function can resolve
 * before exp_infer.py is written — confirm callers do not depend on it.
 */
async function generateTrainingJson(trainingId){
    // trainingId is now project_details_id
    const trainingProjectDetails = await TrainingProjectDetails.findByPk(trainingId);
    if (!trainingProjectDetails) throw new Error('No TrainingProjectDetails found for project_details_id ' + trainingId);
    const detailsObj = trainingProjectDetails.get({ plain: true });
    // Get parent project for name
    const trainingProject = await TrainingProject.findByPk(detailsObj.project_id);
    // Get split percentages (assume they are stored as train_percent, valid_percent, test_percent)
    // NOTE: || treats an explicit 0 the same as missing — presumably fine here.
    const trainPercent = detailsObj.train_percent || 85;
    const validPercent = detailsObj.valid_percent || 10;
    const testPercent = detailsObj.test_percent || 5;

    // Accumulators for the three COCO sections; ids are assigned sequentially.
    let cocoImages = [];
    let cocoAnnotations = [];
    let cocoCategories = [];
    let categoryMap = {};
    let categoryId = 0;
    let imageid = 0;
    let annotationid = 0;

    // class_map is assumed to be an array of [sourceProjectId, [[original, mapped], ...]]
    // pairs — TODO confirm against how TrainingProjectDetails is populated.
    for (const cls of detailsObj.class_map) {
        const asgMap = [];
        const listAsg = cls[1];
        for(const asg of listAsg){
            asgMap.push ({ original: asg[0], mapped: asg[1] });
            // Build category list and mapping (empty/null mapped names are skipped,
            // which also drops their annotations later).
            if (asg[1] && !(asg[1] in categoryMap)) {
                categoryMap[asg[1]] = categoryId;
                cocoCategories.push({ id: categoryId, name: asg[1], supercategory: '' });
                categoryId++;
            }
        }
        const images = await Images.findAll({ where: { project_id: cls[0] } });
        for(const image of images){
            imageid += 1;
            // Normalize the stored Label Studio path into a plain file name:
            // decode %20, strip the local-files prefix and known host directories.
            let fileName = image.image_path;
            if (fileName.includes('%20')) {
                fileName = fileName.replace(/%20/g, ' ');
            }
            if (fileName && fileName.startsWith('/data/local-files/?d=')) {
                fileName = fileName.replace('/data/local-files/?d=', '');
                fileName = fileName.replace('/home/kitraining/home/kitraining/', '');
            }
            if (fileName && fileName.startsWith('home/kitraining/To_Annotate/')) {
                fileName = fileName.replace('home/kitraining/To_Annotate/','');
            }
            // Get annotations for this image
            const annotations = await Annotation.findAll({ where: { image_id: image.image_id } });
            // Use image.width and image.height from DB (populated from original_width/original_height)
            cocoImages.push({
                id: imageid,
                file_name: fileName,
                width: image.width || 0,
                height: image.height || 0
            });
            for (const annotation of annotations) {
                // Translate class name using asgMap
                let mappedClass = annotation.Label;
                for (const mapEntry of asgMap) {
                    if (annotation.Label === mapEntry.original) {
                        mappedClass = mapEntry.mapped;
                        break;
                    }
                }
                // Only add annotation if mappedClass is valid
                if (mappedClass && mappedClass in categoryMap) {
                    annotationid += 1;
                    let area = 0;
                    if (annotation.width && annotation.height) {
                        area = annotation.width * annotation.height;
                    }
                    cocoAnnotations.push({
                        id: annotationid,
                        image_id: imageid,
                        // bbox follows COCO convention [x, y, width, height] —
                        // values come straight from the Annotation row.
                        category_id: categoryMap[mappedClass],
                        bbox: [annotation.x, annotation.y, annotation.width, annotation.height],
                        area: area,
                        iscrowd: annotation.iscrowd || 0
                    });
                }
            }
        }
    }

    // Shuffle images for random split using seed.
    // NOTE(review): sin-based PRNG is not uniform and seed++ has no lasting
    // effect (seed is a fresh parameter each call) — deterministic, but the
    // shuffle quality is poor; acceptable only for splitting.
    function seededRandom(seed) {
        let x = Math.sin(seed++) * 10000;
        return x - Math.floor(x);
    }
    // In-place Fisher-Yates driven by the seeded generator above.
    function shuffle(array, seed) {
        for (let i = array.length - 1; i > 0; i--) {
            const j = Math.floor(seededRandom(seed + i) * (i + 1));
            [array[i], array[j]] = [array[j], array[i]];
        }
    }
    // Use seed from detailsObj if present, else default to 42
    const splitSeed = detailsObj.seed !== undefined && detailsObj.seed !== null ? Number(detailsObj.seed) : 42;
    shuffle(cocoImages, splitSeed);

    // Split images. testCount is implicit (the remainder); it is computed here
    // for clarity but not used below.
    const totalImages = cocoImages.length;
    const trainCount = Math.floor(totalImages * trainPercent / 100);
    const validCount = Math.floor(totalImages * validPercent / 100);
    const testCount = totalImages - trainCount - validCount;

    const trainImages = cocoImages.slice(0, trainCount);
    const validImages = cocoImages.slice(trainCount, trainCount + validCount);
    const testImages = cocoImages.slice(trainCount + validCount);

    // Helper to get image ids for each split
    const trainImageIds = new Set(trainImages.map(img => img.id));
    const validImageIds = new Set(validImages.map(img => img.id));
    const testImageIds = new Set(testImages.map(img => img.id));

    // Split annotations
    const trainAnnotations = cocoAnnotations.filter(ann => trainImageIds.has(ann.image_id));
    const validAnnotations = cocoAnnotations.filter(ann => validImageIds.has(ann.image_id));
    const testAnnotations = cocoAnnotations.filter(ann => testImageIds.has(ann.image_id));

    // Assemble a minimal COCO document (no "info"/"licenses" sections).
    const buildCocoJson = (images, annotations, categories) => ({
        images,
        annotations,
        categories
    });

    const trainJson = buildCocoJson(trainImages, trainAnnotations, cocoCategories);
    const validJson = buildCocoJson(validImages, validAnnotations, cocoCategories);
    const testJson = buildCocoJson(testImages, testAnnotations, cocoCategories);

    // Create output directory: projectname/trainingid/annotations
    const projectName = trainingProject && trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${detailsObj.project_id}`;
    // NOTE(review): outDir is computed but never used — the files below go to
    // the fixed annotationsDir instead; confirm which location is intended.
    const outDir = `${projectName}/${trainingId}`;
    const annotationsDir = `/home/kitraining/To_Annotate/annotations`;
    if (!fs.existsSync(annotationsDir)) {
        fs.mkdirSync(annotationsDir, { recursive: true });
    }

    // Write to files in the annotations directory. File names must match the
    // defaults used by generate-yolox-exp.js (coco_project_<id>_<split>.json).
    const trainPath = `${annotationsDir}/coco_project_${trainingId}_train.json`;
    const validPath = `${annotationsDir}/coco_project_${trainingId}_valid.json`;
    const testPath = `${annotationsDir}/coco_project_${trainingId}_test.json`;
    fs.writeFileSync(trainPath, JSON.stringify(trainJson, null, 2));
    fs.writeFileSync(validPath, JSON.stringify(validJson, null, 2));
    fs.writeFileSync(testPath, JSON.stringify(testJson, null, 2));
    console.log(`COCO JSON splits written to ${annotationsDir} for trainingId ${trainingId}`);

    // Also generate inference exp.py in the same output directory as exp.py (project folder in workspace).
    // Required lazily here (not at module top) — presumably to avoid a circular
    // require with generate-yolox-exp.js; verify before hoisting.
    const { generateYoloxInferenceExp } = require('./generate-yolox-exp');
    const path = require('path');
    const projectFolder = path.join(__dirname, '..', projectName, String(trainingId));
    if (!fs.existsSync(projectFolder)) {
        fs.mkdirSync(projectFolder, { recursive: true });
    }
    const inferenceExpPath = path.join(projectFolder, 'exp_infer.py');
    // Fire-and-forget: errors are logged, not propagated, and the caller is
    // not blocked on the file write (see function-level NOTE above).
    generateYoloxInferenceExp(trainingId).then(expContent => {
        fs.writeFileSync(inferenceExpPath, expContent);
        console.log(`Inference exp.py written to ${inferenceExpPath}`);
    }).catch(err => {
        console.error('Failed to generate inference exp.py:', err);
    });
}
|
||||
|
||||
|
||||
module.exports = {generateTrainingJson};
|
||||
135
backend/services/generate-yolox-exp.js
Normal file
135
backend/services/generate-yolox-exp.js
Normal file
@@ -0,0 +1,135 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const Training = require('../models/training.js');
|
||||
const TrainingProject = require('../models/TrainingProject.js');
|
||||
|
||||
// Known YOLOX experiment/model names.
// NOTE(review): this constant is not referenced anywhere in this file —
// presumably kept for the UI model dropdown or future validation; confirm
// before removing.
const exp_names = [
    'YOLOX-s',
    'YOLOX-m',
    'YOLOX-l',
    'YOLOX-x',
    'YOLOX-Darknet53', //todo
    'YOLOX-Nano',
    'YOLOX-Tiny'
]

//TODO: Clean up generation of exp_names.py and remove second exp creation!!!
|
||||
|
||||
|
||||
/**
 * Produce the training exp.py for a YOLOX training run.
 *
 * - transfer_learning === 'coco': copies the stock exp file for the selected
 *   model into the project folder and returns its path.
 * - transfer_learning === 'sketch': generates a custom exp.py and returns its
 *   content.
 *
 * @param {number|string} trainingId - Training PK, or a project_details_id
 *   (tried as a fallback lookup).
 * @returns {Promise<{type: 'default', expPath: string} | {type: 'custom', expContent: string}>}
 * @throws {Error} when no Training row is found, the stock exp file is
 *   missing, or transfer_learning has an unknown value.
 */
async function generateYoloxExp(trainingId) {
    // Fetch training row from DB by project_details_id if not found by PK
    let training = await Training.findByPk(trainingId);
    if (!training) {
        training = await Training.findOne({ where: { project_details_id: trainingId } });
    }
    if (!training) throw new Error('Training not found for trainingId or project_details_id: ' + trainingId);

    // If transfer_learning is 'coco', just return the path to the default exp.py
    if (training.transfer_learning === 'coco') {
        // NOTE(review): String.replace only replaces the FIRST '-' — fine for
        // names like 'YOLOX-s', but verify for multi-dash names.
        const selectedModel = training.selected_model.toLowerCase().replace('-', '_');
        const expSourcePath = `/home/kitraining/Yolox/YOLOX-main/exps/default/${selectedModel}.py`;
        if (!fs.existsSync(expSourcePath)) {
            throw new Error(`Default exp.py not found for model: ${selectedModel} at ${expSourcePath}`);
        }
        // Copy to project folder (e.g., /home/kitraining/coco_tool/backend/project_XX/YY/exp.py)
        const projectDetailsId = training.project_details_id;
        // FIXME: 'project_23' is hard-coded — per the comment above this should
        // be derived from the project id ('project_XX'); every training
        // currently lands in the same folder.
        const projectFolder = path.resolve(__dirname, `../project_23/${projectDetailsId}`);
        if (!fs.existsSync(projectFolder)) {
            fs.mkdirSync(projectFolder, { recursive: true });
        }
        const expDestPath = path.join(projectFolder, 'exp.py');
        fs.copyFileSync(expSourcePath, expDestPath);
        return { type: 'default', expPath: expDestPath };
    }

    // If transfer_learning is 'sketch', generate a custom exp.py as before
    if (training.transfer_learning === 'sketch') {
        // Delegates to the inference-template generator; the training-specific
        // template generation mentioned in the file TODO is not implemented here.
        const expContent = await generateYoloxInferenceExp(trainingId);
        return { type: 'custom', expContent };
    }

    throw new Error('Unknown transfer_learning type: ' + training.transfer_learning);
}
|
||||
|
||||
/**
 * Materialize the exp.py for a training run at `outPath`.
 *
 * Delegates to generateYoloxExp and then either writes the generated content
 * (custom exps) or copies the stock file (default exps) to the destination.
 *
 * @param {number|string} trainingId - Training PK or project_details_id.
 * @param {string} outPath - destination path for exp.py.
 * @returns {Promise<string>} outPath, for chaining.
 * @throws {Error} when the generator returns an unrecognized result shape.
 */
async function saveYoloxExp(trainingId, outPath) {
    const result = await generateYoloxExp(trainingId);

    if (result.type === 'custom' && result.expContent) {
        fs.writeFileSync(outPath, result.expContent);
        return outPath;
    }

    if (result.type === 'default' && result.expPath) {
        // Only copy when the source differs from the requested destination.
        if (result.expPath !== outPath) {
            fs.copyFileSync(result.expPath, outPath);
        }
        return outPath;
    }

    throw new Error('Unknown expResult type or missing content');
}
|
||||
|
||||
|
||||
|
||||
/**
 * Generate the inference exp.py for a training run and write it to disk.
 *
 * @param {number|string} trainingId - Training PK or project_details_id.
 * @param {string} outPath - destination file path.
 * @param {Object} [options] - overrides forwarded to generateYoloxInferenceExp.
 * @returns {Promise<string>} outPath, for chaining.
 */
async function saveYoloxInferenceExp(trainingId, outPath, options = {}) {
    const content = await generateYoloxInferenceExp(trainingId, options);
    fs.writeFileSync(outPath, content);
    return outPath;
}
|
||||
|
||||
module.exports = { generateYoloxExp, saveYoloxExp, generateYoloxInferenceExp, saveYoloxInferenceExp };
|
||||
48
backend/services/push-yolox-exp.js
Normal file
48
backend/services/push-yolox-exp.js
Normal file
@@ -0,0 +1,48 @@
|
||||
const Training = require('../models/training.js');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Persist YOLOX training settings (as posted by the frontend form) as a new
 * row in the Training table.
 *
 * Normalizations applied before insert:
 *  - `act` is renamed to `activation` (DB column name).
 *  - `save_history_ckpt` checkbox value ('on'/'off') becomes a boolean.
 *  - comma-separated size/scale fields become number arrays (or a single
 *    number when only one value was given).
 *
 * @param {Object} settings - raw settings; must include `project_id`.
 * @returns {Promise<Object>} the created Training row.
 * @throws {Error} when no TrainingProjectDetails exists for the project.
 */
async function pushYoloxExpToDb(settings) {
    // Work on a shallow copy — never mutate the caller's object.
    const normalized = { ...settings };

    // Map 'act' from frontend to 'activation' for DB
    if (normalized.act !== undefined) {
        normalized.activation = normalized.act;
        delete normalized.act;
    }

    // Checkbox inputs post 'on'/'off' strings; store a real boolean.
    // (Simplified from the redundant `=== 'on' ? true : false`.)
    if (typeof normalized.save_history_ckpt === 'string') {
        normalized.save_history_ckpt = normalized.save_history_ckpt === 'on';
    }

    // Convert comma-separated strings to arrays for input_size, test_size,
    // mosaic_scale, mixup_scale. A single value collapses to a plain number.
    // NOTE: parseFloat yields NaN for malformed entries — surfaced (if at all)
    // by DB validation, as before.
    ['input_size', 'test_size', 'mosaic_scale', 'mixup_scale'].forEach(key => {
        if (typeof normalized[key] === 'string') {
            const arr = normalized[key].split(',').map(v => parseFloat(v.trim()));
            normalized[key] = arr.length === 1 ? arr[0] : arr;
        }
    });

    // Link the row to its TrainingProjectDetails parent (required FK).
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findOne({ where: { project_id: normalized.project_id } });
    if (!details) throw new Error('TrainingProjectDetails not found for project_id ' + normalized.project_id);
    normalized.project_details_id = details.id;

    // Create DB row
    const training = await Training.create(normalized);
    return training;
}
|
||||
|
||||
/**
 * Render a full YOLOX training exp.py from a Training DB row and write it to
 * the per-project output directory.
 *
 * Every template field falls back to the stock YOLOX default when the row
 * does not provide a value.
 *
 * NOTE(review): the Python indentation inside the template below appears to
 * use single spaces (possibly mangled in transit) — verify the generated
 * exp.py is syntactically valid Python before relying on it.
 *
 * @param {number|string} trainingId - Training primary key.
 * @returns {Promise<string>} absolute path of the written exp.py.
 * @throws {Error} when the Training row does not exist.
 */
async function generateYoloxExpFromDb(trainingId) {
    // Fetch training row from DB
    const training = await Training.findByPk(trainingId);
    if (!training) throw new Error('Training not found');
    // Template for exp.py — DB value wins, otherwise YOLOX stock default.
    const expTemplate = `#!/usr/bin/env python3\n# Copyright (c) Megvii Inc. All rights reserved.\n\nimport os\nimport random\n\nimport torch\nimport torch.distributed as dist\nimport torch.nn as nn\n\nfrom .base_exp import BaseExp\n\n__all__ = [\"Exp\", \"check_exp_value\"]\n\nclass Exp(BaseExp):\n def __init__(self):\n super().__init__()\n\n # ---------------- model config ---------------- #\n self.num_classes = ${training.num_classes || 80}\n self.depth = ${training.depth || 1.00}\n self.width = ${training.width || 1.00}\n self.act = \"${training.activation || training.act || 'silu'}\"\n\n # ---------------- dataloader config ---------------- #\n self.data_num_workers = ${training.data_num_workers || 4}\n self.input_size = (${Array.isArray(training.input_size) ? training.input_size.join(', ') : '640, 640'})\n self.multiscale_range = ${training.multiscale_range || 5}\n self.data_dir = ${training.data_dir ? `\"${training.data_dir}\"` : 'None'}\n self.train_ann = \"${training.train_ann || 'instances_train2017.json'}\"\n self.val_ann = \"${training.val_ann || 'instances_val2017.json'}\"\n self.test_ann = \"${training.test_ann || 'instances_test2017.json'}\"\n\n # --------------- transform config ----------------- #\n self.mosaic_prob = ${training.mosaic_prob !== undefined ? training.mosaic_prob : 1.0}\n self.mixup_prob = ${training.mixup_prob !== undefined ? training.mixup_prob : 1.0}\n self.hsv_prob = ${training.hsv_prob !== undefined ? training.hsv_prob : 1.0}\n self.flip_prob = ${training.flip_prob !== undefined ? training.flip_prob : 0.5}\n self.degrees = ${training.degrees !== undefined ? training.degrees : 10.0}\n self.translate = ${training.translate !== undefined ? training.translate : 0.1}\n self.mosaic_scale = (${Array.isArray(training.mosaic_scale) ? training.mosaic_scale.join(', ') : '0.1, 2'})\n self.enable_mixup = ${training.enable_mixup !== undefined ? training.enable_mixup : true}\n self.mixup_scale = (${Array.isArray(training.mixup_scale) ? training.mixup_scale.join(', ') : '0.5, 1.5'})\n self.shear = ${training.shear !== undefined ? training.shear : 2.0}\n\n # -------------- training config --------------------- #\n self.warmup_epochs = ${training.warmup_epochs !== undefined ? training.warmup_epochs : 5}\n self.max_epoch = ${training.max_epoch !== undefined ? training.max_epoch : 300}\n self.warmup_lr = ${training.warmup_lr !== undefined ? training.warmup_lr : 0}\n self.min_lr_ratio = ${training.min_lr_ratio !== undefined ? training.min_lr_ratio : 0.05}\n self.basic_lr_per_img = ${training.basic_lr_per_img !== undefined ? training.basic_lr_per_img : 0.01 / 64.0}\n self.scheduler = \"${training.scheduler || 'yoloxwarmcos'}\"\n self.no_aug_epochs = ${training.no_aug_epochs !== undefined ? training.no_aug_epochs : 15}\n self.ema = ${training.ema !== undefined ? training.ema : true}\n self.weight_decay = ${training.weight_decay !== undefined ? training.weight_decay : 5e-4}\n self.momentum = ${training.momentum !== undefined ? training.momentum : 0.9}\n self.print_interval = ${training.print_interval !== undefined ? training.print_interval : 10}\n self.eval_interval = ${training.eval_interval !== undefined ? training.eval_interval : 10}\n self.save_history_ckpt = ${training.save_history_ckpt !== undefined ? training.save_history_ckpt : true}\n self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(\".\")[0]\n\n # ----------------- testing config ------------------ #\n self.test_size = (${Array.isArray(training.test_size) ? training.test_size.join(', ') : '640, 640'})\n self.test_conf = ${training.test_conf !== undefined ? training.test_conf : 0.01}\n self.nmsthre = ${training.nmsthre !== undefined ? training.nmsthre : 0.65}\n\n # ... rest of the template ...\n\ndef check_exp_value(exp: Exp):\n h, w = exp.input_size\n assert h % 32 == 0 and w % 32 == 0, \"input size must be multiples of 32\"\n`;
    // Save to file in output directory: project_<id>/<trainingId>/exp.py,
    // or a shared exp_files/ folder when the row has no project_id.
    const outDir = path.join(__dirname, '../../', training.project_id ? `project_${training.project_id}/${trainingId}` : 'exp_files');
    if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
    const filePath = path.join(outDir, 'exp.py');
    fs.writeFileSync(filePath, expTemplate);
    return filePath;
}
|
||||
|
||||
module.exports = { pushYoloxExpToDb, generateYoloxExpFromDb };
|
||||
120
backend/services/seed-label-studio.js
Normal file
120
backend/services/seed-label-studio.js
Normal file
@@ -0,0 +1,120 @@
|
||||
const sequelize = require('../database/database.js');
|
||||
const { Project, Img, Ann } = require('../models');
|
||||
const { fetchLableStudioProject, fetchProjectIdsAndTitles } = require('./fetch-labelstudio.js');
|
||||
|
||||
const updateStatus = { running: false };
|
||||
|
||||
/**
 * Synchronize the local DB with Label Studio: for every remote project,
 * upsert the project row, wipe its previous images/annotations, and bulk
 * re-insert them from a fresh JSON_MIN export.
 *
 * Errors are caught and reported in the return value; updateStatus.running
 * is always reset in the finally block.
 *
 * @returns {Promise<{success: boolean, message: string}>}
 */
async function seedLabelStudio() {
    updateStatus.running = true;
    console.log('Seeding started');
    try {
        await sequelize.sync();
        const projects = await fetchProjectIdsAndTitles();

        for (const project of projects) {
            console.log(`Processing project ${project.id} (${project.title})`);

            // Upsert project in DB
            await Project.upsert({ project_id: project.id, title: project.title });

            // Fetch project data (annotations array)
            const data = await fetchLableStudioProject(project.id);
            if (!Array.isArray(data) || data.length === 0) {
                console.log(`No annotation data for project ${project.id}`);
                continue;
            }

            // Remove old images and annotations for this project so the
            // re-insert below starts from a clean slate.
            const oldImages = await Img.findAll({ where: { project_id: project.id } });
            const oldImageIds = oldImages.map(img => img.image_id);
            if (oldImageIds.length > 0) {
                await Ann.destroy({ where: { image_id: oldImageIds } });
                await Img.destroy({ where: { project_id: project.id } });
                console.log(`Deleted ${oldImageIds.length} old images and their annotations for project ${project.id}`);
            }

            // Rows staged for bulk insert (annotations temporarily carry
            // image_path as a join key; replaced by image_id below).
            const imagesBulk = [];
            const annsBulk = [];

            for (const ann of data) {
                // Extract original image dimensions from whichever annotation
                // field variant this export uses (label_rectangles or label).
                let width = null;
                let height = null;
                if (Array.isArray(ann.label_rectangles) && ann.label_rectangles.length > 0) {
                    width = ann.label_rectangles[0].original_width;
                    height = ann.label_rectangles[0].original_height;
                } else if (Array.isArray(ann.label) && ann.label.length > 0 && ann.label[0].original_width && ann.label[0].original_height) {
                    width = ann.label[0].original_width;
                    height = ann.label[0].original_height;
                }

                // Only push image and annotations if width and height are valid
                if (width && height) {
                    imagesBulk.push({
                        project_id: project.id,
                        image_path: ann.image,
                        width,
                        height
                    });

                    // Handle multiple annotations per image. Label Studio
                    // stores rectangles as percentages of the image size;
                    // convert to absolute pixel coordinates here.
                    if (Array.isArray(ann.label_rectangles)) {
                        for (const ann_detail of ann.label_rectangles) {
                            annsBulk.push({
                                image_path: ann.image,
                                x: (ann_detail.x * width) / 100,
                                y: (ann_detail.y * height) / 100,
                                width: (ann_detail.width * width) / 100,
                                height: (ann_detail.height * height) / 100,
                                Label: Array.isArray(ann_detail.rectanglelabels) ? (ann_detail.rectanglelabels[0] || 'unknown') : (ann_detail.rectanglelabels || 'unknown')
                            });
                        }
                    } else if (Array.isArray(ann.label)) {
                        // Same conversion for the alternate field name.
                        for (const ann_detail of ann.label) {
                            annsBulk.push({
                                image_path: ann.image,
                                x: (ann_detail.x * width) / 100,
                                y: (ann_detail.y * height) / 100,
                                width: (ann_detail.width * width) / 100,
                                height: (ann_detail.height * height) / 100,
                                Label: Array.isArray(ann_detail.rectanglelabels) ? (ann_detail.rectanglelabels[0] || 'unknown') : (ann_detail.rectanglelabels || 'unknown')
                            });
                        }
                    }
                }
            }

            // 1) Insert images and get generated IDs.
            // NOTE(review): { returning: true } populates generated PKs only on
            // dialects that support it (e.g. Postgres) — verify against the
            // configured Sequelize dialect.
            const insertedImages = await Img.bulkCreate(imagesBulk, { returning: true });

            // 2) Map image_path -> image_id.
            // NOTE(review): if two tasks share the same image_path, later rows
            // overwrite earlier ones and their annotations collapse onto one
            // image_id — confirm paths are unique per project.
            const imageMap = {};
            for (const img of insertedImages) {
                imageMap[img.image_path] = img.image_id;
            }

            // 3) Assign correct image_id to each annotation
            for (const ann of annsBulk) {
                ann.image_id = imageMap[ann.image_path];
                delete ann.image_path; // cleanup
            }

            // 4) Insert annotations
            await Ann.bulkCreate(annsBulk);

            console.log(`Inserted ${imagesBulk.length} images and ${annsBulk.length} annotations for project ${project.id}`);
        }

        console.log('Seeding done');
        return { success: true, message: 'Data inserted successfully!' };
    } catch (error) {
        console.error('Error inserting data:', error);
        return { success: false, message: error.message };
    } finally {
        // Always clear the in-progress flag, success or failure.
        updateStatus.running = false;
        console.log('updateStatus.running set to false');
    }
}
|
||||
|
||||
module.exports = { seedLabelStudio, updateStatus };
|
||||
Reference in New Issue
Block a user