Fix training; add global settings
backend/.gitignore (vendored, 80 lines)
@@ -1,40 +1,40 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
venv/
env/
ENV/
*.egg-info/
dist/
build/

# Flask
instance/
.webassets-cache

# Database
*.db
*.sqlite

# Environment
.env
.flaskenv

# IDE
.vscode/
.idea/
*.swp
*.swo

# OS
.DS_Store
Thumbs.db

# Logs
*.log

# Uploads
uploads/*.pth
@@ -1,28 +0,0 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate/"
        self.train_ann = "coco_project_27_train.json"
        self.val_ann = "coco_project_27_valid.json"
        self.test_ann = "coco_project_27_test.json"
        self.num_classes = 80
        self.pretrained_ckpt = r'/home/kitraining/Yolox/YOLOX-main/pretrained/YOLOX-s.pth'

        self.depth = 1.0
        self.width = 1.0
        self.input_size = (640.0, 640.0)
        self.mosaic_scale = (0.1, 2.0)
        self.random_size = (10, 20)
        self.test_size = (640.0, 640.0)
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
        self.enable_mixup = False
backend/1/6/exp.py (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate/"
        self.train_ann = "coco_project_2_train.json"
        self.val_ann = "coco_project_2_valid.json"
        self.test_ann = "coco_project_2_test.json"
        self.num_classes = 2
        self.pretrained_ckpt = r'/home/kitraining/Yolox/YOLOX-main/pretrained/YOLOX-s.pth'
        self.activation = "silu"
        self.depth = 0.33
        self.scheduler = "yoloxwarmcos"
        self.width = 0.5
        self.input_size = (640.0, 640.0)
        self.mosaic_scale = (0.1, 2.0)
        self.test_size = (640.0, 640.0)
        self.enable_mixup = True
        self.max_epoch = 300
        self.warmup_epochs = 5
        self.warmup_lr = 0.0
        self.no_aug_epochs = 15
        self.min_lr_ratio = 0.05
        self.ema = True
        self.weight_decay = 0.0005
        self.momentum = 0.9
        self.print_interval = 10
        self.eval_interval = 10
        self.test_conf = 0.01
        self.nms_thre = 0.65
        self.mosaic_prob = 1.0
        self.mixup_prob = 1.0
        self.hsv_prob = 1.0
        self.flip_prob = 0.5
        self.degrees = 10.0
        self.translate = 0.1
        self.shear = 2.0
        self.mixup_scale = (0.5, 1.5)
        self.random_size = (10, 20)
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
backend/1/6/exp_infer.py (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate/"
        self.train_ann = "coco_project_6_train.json"
        self.val_ann = "coco_project_6_valid.json"
        self.test_ann = "coco_project_6_test.json"
        self.num_classes = 2
        self.pretrained_ckpt = r'/home/kitraining/Yolox/YOLOX-main/pretrained/YOLOX-s.pth'
        self.depth = 0.33
        self.width = 0.5
        self.input_size = (640.0, 640.0)
        self.mosaic_scale = (0.1, 2.0)
        self.test_size = (640.0, 640.0)
        self.enable_mixup = True
        self.max_epoch = 300
        self.warmup_epochs = 5
        self.warmup_lr = 0.0
        self.scheduler = "yoloxwarmcos"
        self.no_aug_epochs = 15
        self.min_lr_ratio = 0.05
        self.ema = True
        self.weight_decay = 0.0005
        self.momentum = 0.9
        self.print_interval = 10
        self.eval_interval = 10
        self.test_conf = 0.01
        self.nms_thre = 0.65
        self.mosaic_prob = 1.0
        self.mixup_prob = 1.0
        self.hsv_prob = 1.0
        self.flip_prob = 0.5
        self.degrees = 10.0
        self.translate = 0.1
        self.shear = 2.0
        self.mixup_scale = (0.5, 1.5)
        self.activation = "silu"
        self.random_size = (10, 20)
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
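These generated experiment files are consumed by the upstream YOLOX training entry point (`tools/train.py`). A hedged sketch of how the backend might launch one via Python's `subprocess` module (the converted backend uses `subprocess` for such calls; its actual invocation is not shown in this section, and the flags below are the standard upstream YOLOX CLI options):

```python
# Hypothetical launch sketch; the backend's real call lives in routes/api.py (not shown).
import subprocess

cmd = [
    "python", "tools/train.py",
    "-f", "backend/1/6/exp.py",   # experiment file generated above
    "-d", "1",                    # number of GPUs
    "-b", "8",                    # total batch size
    "-c", "/home/kitraining/Yolox/YOLOX-main/pretrained/YOLOX-s.pth",
]
subprocess.run(cmd, check=True)
```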
@@ -1,148 +0,0 @@
# Backend Conversion Summary

## ✅ Conversion Complete

Your Node.js backend has been successfully converted to Python using Flask and SQLAlchemy.

## 📁 New Python Files Created

### Core Application
- **app.py** - Main Flask application (replaces server.js)
- **start.py** - Startup script for easy launching
- **requirements.txt** - Python dependencies (replaces package.json)

### Database Layer
- **database/database.py** - SQLAlchemy database configuration (replaces database.js)

### Models (Sequelize → SQLAlchemy)
- **models/TrainingProject.py**
- **models/TrainingProjectDetails.py**
- **models/training.py**
- **models/LabelStudioProject.py**
- **models/Images.py**
- **models/Annotation.py**
- **models/__init__.py**

### API Routes
- **routes/api.py** - All API endpoints converted to Flask blueprints (replaces api.js)
- **routes/__init__.py**

### Services
- **services/fetch_labelstudio.py** - Label Studio API integration
- **services/seed_label_studio.py** - Database seeding logic
- **services/generate_json_yolox.py** - COCO JSON generation
- **services/generate_yolox_exp.py** - YOLOX experiment file generation
- **services/push_yolox_exp.py** - Save training settings to DB
- **services/__init__.py**

### Documentation
- **README.md** - Comprehensive documentation
- **QUICKSTART.md** - Quick setup guide
- **.gitignore** - Python-specific ignore patterns

## 🔄 Key Changes

### Technology Stack
| Component | Node.js | Python |
|-----------|---------|--------|
| Framework | Express.js | Flask |
| ORM | Sequelize | SQLAlchemy |
| HTTP Client | node-fetch | requests |
| Package Manager | npm | pip |
| Runtime | Node.js | Python 3.8+ |

### API Compatibility
✅ All endpoints preserved with same URLs
✅ Request/response formats maintained
✅ Same database schema
✅ Same business logic

### Converted Features
- ✅ Training project management
- ✅ Label Studio integration
- ✅ YOLOX configuration and training
- ✅ File upload handling
- ✅ Image and annotation management
- ✅ COCO JSON generation
- ✅ Training logs

## 🚀 Getting Started

1. **Install dependencies:**
   ```bash
   cd backend
   python -m venv venv
   .\venv\Scripts\Activate.ps1  # Windows
   pip install -r requirements.txt
   ```

2. **Run the server:**
   ```bash
   python start.py
   ```

3. **Server runs at:** `http://0.0.0.0:3000`

## 📦 Dependencies Installed

- Flask 3.0.0 - Web framework
- Flask-CORS 4.0.0 - Cross-origin resource sharing
- Flask-SQLAlchemy 3.1.1 - ORM integration
- SQLAlchemy 2.0.23 - Database ORM
- PyMySQL 1.1.0 - MySQL driver
- requests 2.31.0 - HTTP client
- Pillow 10.1.0 - Image processing

## ⚠️ Important Notes

1. **Virtual Environment**: Always activate the virtual environment before running
2. **Database**: MySQL must be running with the `myapp` database created
3. **Credentials**: Update database credentials in `app.py` if needed
4. **Python Version**: Requires Python 3.8 or higher

## 🧪 Testing

Test the conversion:
```bash
# Get all training projects
curl http://localhost:3000/api/training-projects

# Get Label Studio projects
curl http://localhost:3000/api/label-studio-projects
```

## 📝 Original Files

Your original Node.js files remain untouched:
- server.js
- package.json
- routes/api.js
- models/*.js (JavaScript)
- services/*.js (JavaScript)

You can keep them as backup or remove them once you verify the Python version works correctly.

## 🔍 What to Verify

1. ✅ Database connection works
2. ✅ All API endpoints respond correctly
3. ✅ File uploads work
4. ✅ Label Studio integration works
5. ✅ YOLOX training can be triggered
6. ✅ COCO JSON generation works

## 🐛 Troubleshooting

See **QUICKSTART.md** for common issues and solutions.

## 📚 Further Documentation

- **README.md** - Complete project documentation
- **QUICKSTART.md** - Setup guide
- **API Documentation** - All endpoints documented in README.md

---

**Conversion completed successfully!** 🎉

Your backend is now running on Python with Flask and SQLAlchemy.
@@ -1,113 +1,113 @@
# Quick Start Guide - Python Backend

## Step-by-Step Setup

### 1. Install Python
Make sure you have Python 3.8 or higher installed:
```bash
python --version
```

### 2. Create Virtual Environment
```bash
cd backend
python -m venv venv
```

### 3. Activate Virtual Environment

**Windows:**
```powershell
.\venv\Scripts\Activate.ps1
```

**Linux/Mac:**
```bash
source venv/bin/activate
```

### 4. Install Dependencies
```bash
pip install -r requirements.txt
```

### 5. Verify Database Connection
Make sure MySQL is running and the database `myapp` exists:
```sql
CREATE DATABASE IF NOT EXISTS myapp;
```

### 6. Run the Server
```bash
python start.py
```

Or:
```bash
python app.py
```

The server should now be running at `http://0.0.0.0:3000`

## Testing the API

Test if the server is working:
```bash
curl http://localhost:3000/api/training-projects
```

## Common Issues

### ModuleNotFoundError
If you get import errors, make sure you've activated the virtual environment and installed all dependencies.

### Database Connection Error
Check that:
- MySQL is running
- Database credentials in `app.py` are correct
- Database `myapp` exists

### Port Already in Use
If port 3000 is already in use, modify the port in `app.py`:
```python
app.run(host='0.0.0.0', port=3001, debug=True)
```

## What Changed from Node.js

1. **Server Framework**: Express.js → Flask
2. **ORM**: Sequelize → SQLAlchemy
3. **HTTP Client**: node-fetch → requests
4. **Package Manager**: npm → pip
5. **Dependencies**: package.json → requirements.txt
6. **Startup**: `node server.js` → `python app.py`

## Next Steps

1. Test all API endpoints
2. Update frontend to point to the new Python backend (if needed)
3. Migrate any remaining Node.js-specific logic
4. Test file uploads and downloads
5. Test YOLOX training functionality

## File Structure Comparison

**Before (Node.js):**
```
backend/
├── server.js
├── package.json
├── routes/api.js
├── models/*.js
└── services/*.js
```

**After (Python):**
```
backend/
├── app.py
├── requirements.txt
├── routes/api.py
├── models/*.py
└── services/*.py
```
@@ -1,107 +1,107 @@
# Python Backend for COCO Tool

This is the converted Python backend using Flask and SQLAlchemy.

## Setup

1. Create a virtual environment (recommended):
```bash
python -m venv venv
```

2. Activate the virtual environment:
   - Windows: `venv\Scripts\activate`
   - Linux/Mac: `source venv/bin/activate`

3. Install dependencies:
```bash
pip install -r requirements.txt
```

## Running the Server

### Option 1: Using start.py
```bash
python start.py
```

### Option 2: Using Flask directly
```bash
python app.py
```

### Option 3: Using Flask CLI
```bash
flask --app app run --host=0.0.0.0 --port=3000
```

The server will start on `http://0.0.0.0:3000`

## Database Configuration

The database configuration is in `database/database.py`. Default settings:
- Host: localhost
- Database: myapp
- User: root
- Password: root

Modify `app.py` to change these settings.

## Project Structure

```
backend/
├── app.py              # Main Flask application
├── start.py            # Startup script
├── requirements.txt    # Python dependencies
├── database/
│   └── database.py     # Database configuration
├── models/             # SQLAlchemy models
│   ├── __init__.py
│   ├── Annotation.py
│   ├── Images.py
│   ├── LabelStudioProject.py
│   ├── training.py
│   ├── TrainingProject.py
│   └── TrainingProjectDetails.py
├── routes/
│   └── api.py          # API endpoints
└── services/           # Business logic
    ├── fetch_labelstudio.py
    ├── generate_json_yolox.py
    ├── generate_yolox_exp.py
    ├── push_yolox_exp.py
    └── seed_label_studio.py
```

## API Endpoints

All endpoints are prefixed with `/api`:

- `GET /api/seed` - Seed database from Label Studio
- `POST /api/generate-yolox-json` - Generate YOLOX training files
- `POST /api/start-yolox-training` - Start YOLOX training
- `GET /api/training-log` - Get training logs
- `GET/POST /api/training-projects` - Manage training projects
- `GET /api/label-studio-projects` - Get Label Studio projects
- `GET/POST/PUT /api/training-project-details` - Manage project details
- `POST /api/yolox-settings` - Save YOLOX settings
- `GET/DELETE /api/trainings` - Manage trainings
- `DELETE /api/training-projects/:id` - Delete training project
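For example, the GET endpoints can be exercised with the same `requests` library the backend itself depends on (assuming the default port 3000 shown above):

```python
import requests

# List all training projects from a locally running backend
resp = requests.get('http://localhost:3000/api/training-projects')
resp.raise_for_status()
print(resp.json())
```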
## Migration Notes

This is a direct conversion from Node.js/Express to Python/Flask:
- Express → Flask
- Sequelize ORM → SQLAlchemy ORM
- node-fetch → requests library
- Async routes maintained where needed
- All file paths and logic preserved from original

## Differences from Node.js Version

1. Python uses async/await differently - some routes may need adjustments
2. File handling uses Python's built-in open() instead of fs module
3. Subprocess calls use Python's subprocess module
4. JSON handling uses Python's json module
5. Path operations use os.path instead of Node's path module
@@ -1,43 +1,48 @@
 from flask import Flask, send_from_directory
 from flask_cors import CORS
 import os
 from database.database import db, init_db

 app = Flask(__name__, static_folder='..', static_url_path='')
 CORS(app)

 # Configure database
-app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:root@localhost/myapp'
+app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:root@localhost/myapp2'
 app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

 # Initialize database
 db.init_app(app)

 # Import and register blueprints
 from routes.api import api_bp
 app.register_blueprint(api_bp, url_prefix='/api')

 # Serve static files (HTML, CSS, JS)
 @app.route('/')
 def index():
     return send_from_directory('..', 'index.html')

 @app.route('/<path:path>')
 def serve_static(path):
     return send_from_directory('..', path)

 # Initialize DB and start server
 if __name__ == '__main__':
     with app.app_context():
         try:
             # Test database connection
             db.engine.connect()
             print('DB connection established.')

             # Create tables if they don't exist
             db.create_all()

+            # Initialize default settings
+            from services.settings_service import initialize_default_settings
+            initialize_default_settings()
+            print('Settings initialized.')
+
             # Start server
-            app.run(host='0.0.0.0', port=3000, debug=True)
+            app.run(host='0.0.0.0', port=4000, debug=True)
         except Exception as err:
             print(f'Failed to start: {err}')
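`services/settings_service.py` is referenced by the new startup code but does not appear in this section of the diff. A hypothetical sketch of what `initialize_default_settings` might look like, assuming a simple key/value `Setting` model (all names and defaults below are assumptions):

```python
# Hypothetical sketch -- the real services/settings_service.py is not shown in this diff.
# Assumes a simple key/value Setting model registered with the shared SQLAlchemy instance.
from database.database import db


class Setting(db.Model):
    __tablename__ = 'setting'

    key = db.Column(db.String(64), primary_key=True)
    value = db.Column(db.String(255), nullable=False)


DEFAULTS = {
    'selected_model': 'yolox-s',
    'transfer_learning': 'coco',
}


def initialize_default_settings():
    """Insert default settings rows if missing; must run inside an app context."""
    # Ensure the table exists even if app.py's create_all ran before this import.
    db.create_all()
    for key, value in DEFAULTS.items():
        if db.session.get(Setting, key) is None:
            db.session.add(Setting(key=key, value=value))
    db.session.commit()
```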
@@ -1,25 +0,0 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate/"
        self.train_ann = "coco_project_5_train.json"
        self.val_ann = "coco_project_5_valid.json"
        self.test_ann = "coco_project_5_test.json"
        self.num_classes = 4
        self.depth = 1.0
        self.width = 1.0
        self.input_size = (640, 640)
        self.mosaic_scale = (0.1, 2)
        self.random_size = (10, 20)
        self.test_size = (640, 640)
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
        self.enable_mixup = False
backend/backend/1/1/exp_infer.py (new file, 84 lines)
@@ -0,0 +1,84 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate"  # Where images are located
        self.annotations_dir = "./backend/1/custom_exp_1"  # Where annotation JSONs are located
        self.train_ann = "coco_project_1_train.json"
        self.val_ann = "coco_project_1_valid.json"
        self.test_ann = "coco_project_1_test.json"
        self.num_classes = 2
        # Disable train2017 subdirectory - our images are directly in data_dir
        self.name = ""
        # Set data workers for training
        self.data_num_workers = 8
        self.depth = 1.0
        self.width = 1.0
        self.input_size = (640.0, 640.0)
        self.mosaic_scale = (0.1, 2.0)
        self.test_size = (640.0, 640.0)
        self.enable_mixup = True
        self.max_epoch = 300
        self.warmup_epochs = 5
        self.warmup_lr = 0.0
        self.scheduler = "yoloxwarmcos"
        self.no_aug_epochs = 15
        self.min_lr_ratio = 0.05
        self.ema = True
        self.weight_decay = 0.0005
        self.momentum = 0.9
        self.print_interval = 10
        self.eval_interval = 10
        self.test_conf = 0.01
        self.nms_thre = 0.65
        self.mosaic_prob = 1.0
        self.mixup_prob = 1.0
        self.hsv_prob = 1.0
        self.flip_prob = 0.5
        self.degrees = 10.0
        self.translate = 0.1
        self.shear = 2.0
        self.mixup_scale = (0.5, 1.5)
        self.activation = "silu"
        self.random_size = (10, 20)
        # Derive the experiment name from this file's name
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]

    def get_dataset(self, cache=False, cache_type="ram"):
        """Override to use name parameter for images directory"""
        from yolox.data import COCODataset

        # COCODataset constructs image paths as: os.path.join(data_dir, name, file_name)
        # YOLOX adds "annotations/" to data_dir automatically, so we pass annotations_dir directly
        # Use empty string for name since we have absolute paths in JSON
        return COCODataset(
            data_dir=self.annotations_dir,
            json_file=self.train_ann,
            name="",
            img_size=self.input_size,
            preproc=self.preproc if hasattr(self, 'preproc') else None,
            cache=cache,
            cache_type=cache_type,
        )

    def get_eval_dataset(self, **kwargs):
        """Override eval dataset using name parameter"""
        from yolox.data import COCODataset

        testdev = kwargs.get("testdev", False)
        legacy = kwargs.get("legacy", False)

        return COCODataset(
            data_dir=self.annotations_dir,
            json_file=self.val_ann if not testdev else self.test_ann,
            name="",
            img_size=self.test_size,
            preproc=None,  # No preprocessing for evaluation
        )
backend/backend/1/custom_exp_1/annotations/coco_project_1_test.json (new file, 3634 lines; diff suppressed because it is too large)
backend/backend/1/custom_exp_1/annotations/coco_project_1_train.json (new file, 57310 lines; diff suppressed because it is too large)
backend/backend/1/custom_exp_1/annotations/coco_project_1_valid.json (new file, 6874 lines; diff suppressed because it is too large)
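The three suppressed annotation files follow the standard COCO JSON layout produced by `services/generate_json_yolox.py`. An illustrative stub of that layout (field names are from the COCO spec; the concrete values here are made up, and absolute `file_name` paths match the "absolute paths in JSON" comment in the exp files above):

```python
# Illustrative COCO layout; the real files are generated by services/generate_json_yolox.py.
coco_stub = {
    "images": [
        {"id": 1, "file_name": "/home/kitraining/To_Annotate/example.jpg",
         "width": 640, "height": 480},
    ],
    "annotations": [
        {"id": 1, "image_id": 1, "category_id": 1,
         "bbox": [10.0, 20.0, 100.0, 50.0],  # x, y, width, height
         "area": 5000.0, "iscrowd": 0},
    ],
    "categories": [
        {"id": 1, "name": "example_class"},
    ],
}
```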
backend/backend/1/custom_exp_1/exp.py (new file, 84 lines)
@@ -0,0 +1,84 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate"  # Where images are located
        self.annotations_dir = "./backend/1/custom_exp_1"  # Where annotation JSONs are located
        self.train_ann = "coco_project_1_train.json"
        self.val_ann = "coco_project_1_valid.json"
        self.test_ann = "coco_project_1_test.json"
        self.num_classes = 2
        # Disable train2017 subdirectory - our images are directly in data_dir
        self.name = ""
        # Set data workers for training
        self.data_num_workers = 8
        self.depth = 1.0
        self.width = 1.0
        self.input_size = (640.0, 640.0)
        self.mosaic_scale = (0.1, 2.0)
        self.test_size = (640.0, 640.0)
        self.enable_mixup = True
        self.max_epoch = 300
        self.warmup_epochs = 5
        self.warmup_lr = 0.0
        self.scheduler = "yoloxwarmcos"
        self.no_aug_epochs = 15
        self.min_lr_ratio = 0.05
        self.ema = True
        self.weight_decay = 0.0005
        self.momentum = 0.9
        self.print_interval = 10
        self.eval_interval = 10
        self.test_conf = 0.01
        self.nms_thre = 0.65
        self.mosaic_prob = 1.0
        self.mixup_prob = 1.0
        self.hsv_prob = 1.0
        self.flip_prob = 0.5
        self.degrees = 10.0
        self.translate = 0.1
        self.shear = 2.0
        self.mixup_scale = (0.5, 1.5)
        self.activation = "silu"
        self.random_size = (10, 20)
        # Derive the experiment name from this file's name
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]

    def get_dataset(self, cache=False, cache_type="ram"):
        """Override to use name parameter for images directory"""
        from yolox.data import COCODataset

        # COCODataset constructs image paths as: os.path.join(data_dir, name, file_name)
        # YOLOX adds "annotations/" to data_dir automatically, so we pass annotations_dir directly
        # Use empty string for name since we have absolute paths in JSON
        return COCODataset(
            data_dir=self.annotations_dir,
            json_file=self.train_ann,
            name="",
            img_size=self.input_size,
            preproc=self.preproc if hasattr(self, 'preproc') else None,
            cache=cache,
            cache_type=cache_type,
        )

    def get_eval_dataset(self, **kwargs):
        """Override eval dataset using name parameter"""
        from yolox.data import COCODataset

        testdev = kwargs.get("testdev", False)
        legacy = kwargs.get("legacy", False)

        return COCODataset(
            data_dir=self.annotations_dir,
            json_file=self.val_ann if not testdev else self.test_ann,
            name="",
            img_size=self.test_size,
            preproc=None,  # No preprocessing for evaluation
        )
@@ -1,14 +1,14 @@
 import pymysql

-conn = pymysql.connect(host='localhost', user='root', password='root', database='myapp')
+conn = pymysql.connect(host='localhost', user='root', password='root', database='myapp2')
 cursor = conn.cursor()
 cursor.execute('DESCRIBE image')
 rows = cursor.fetchall()

 print("Current 'image' table structure:")
 print("-" * 60)
 for row in rows:
     print(f"Field: {row[0]:<15} Type: {row[1]:<15} Null: {row[2]}")
 print("-" * 60)

 conn.close()
@@ -1,140 +1,140 @@
# YOLOX Base Configuration System

## Overview

This directory contains base experiment configurations for YOLOX models. These configurations define "protected" parameters that are preserved during transfer learning from COCO-pretrained models.

## How It Works

### Transfer Learning Flow

1. **COCO Transfer Learning** (`transfer_learning = 'coco'`):
   - Loads base configuration from `data/yolox_*.py` based on `selected_model`
   - Base parameters are **protected** and used as defaults
   - User settings from the form only override what's explicitly set
   - Result: Best of both worlds - proven COCO settings + your customizations

2. **Sketch/Custom Training** (`transfer_learning = 'sketch'`):
   - No base configuration loaded
   - Uses only user-defined parameters from the training form
   - Full control over all settings

### Base Configuration Files

- `yolox_s.py` - YOLOX-Small (depth=0.33, width=0.50)
- `yolox_m.py` - YOLOX-Medium (depth=0.67, width=0.75)
- `yolox_l.py` - YOLOX-Large (depth=1.0, width=1.0)
- `yolox_x.py` - YOLOX-XLarge (depth=1.33, width=1.25)

### Protected Parameters

These parameters are defined in base configs and **preserved** unless explicitly overridden:

**Model Architecture:**
- `depth` - Model depth multiplier
- `width` - Model width multiplier
- `activation` - Activation function (silu)

**Training Hyperparameters:**
- `basic_lr_per_img` - Learning rate per image
- `scheduler` - LR scheduler (yoloxwarmcos)
- `warmup_epochs` - Warmup epochs
- `max_epoch` - Maximum training epochs
- `no_aug_epochs` - No augmentation epochs
- `min_lr_ratio` - Minimum LR ratio

**Optimizer:**
- `momentum` - SGD momentum
- `weight_decay` - Weight decay

**Augmentation:**
- `mosaic_prob` - Mosaic probability
- `mixup_prob` - Mixup probability
- `hsv_prob` - HSV augmentation probability
- `flip_prob` - Flip probability
- `degrees` - Rotation degrees
- `translate` - Translation
- `shear` - Shear
- `mosaic_scale` - Mosaic scale range
- `mixup_scale` - Mixup scale range
- `enable_mixup` - Enable mixup

**Input/Output:**
- `input_size` - Training input size
- `test_size` - Testing size
- `random_size` - Random size range

**Evaluation:**
- `eval_interval` - Evaluation interval
- `print_interval` - Print interval

## Customizing Base Configurations

### Adding a New Model

Create a new file `data/yolox_MODELNAME.py`:

```python
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Base configuration for YOLOX-MODELNAME

class BaseExp:
    """Base experiment configuration for YOLOX-MODELNAME"""

    # Define protected parameters
    depth = 1.0
    width = 1.0
    # ... other parameters
```

### Modifying Parameters

Edit the corresponding `yolox_*.py` file and update the `BaseExp` class attributes.

**Example:** To change YOLOX-S max epochs:
```python
# In data/yolox_s.py
class BaseExp:
    max_epoch = 500  # Changed from 300
    # ... other parameters
```

## Parameter Priority

The merge logic follows this priority (highest to lowest):

1. **User form values** (if explicitly set, not None)
2. **Base config values** (if transfer_learning='coco')
3. **Default fallbacks** (hardcoded minimums)

A small sketch of this merge rule appears after the examples below.

## Example

### COCO Transfer Learning
```
User sets in form: max_epoch=100, depth=0.5
Base config (yolox_s.py) has: depth=0.33, width=0.50, max_epoch=300

Result: depth=0.5 (user override), width=0.50 (base), max_epoch=100 (user override)
```

### Sketch Training
```
User sets in form: max_epoch=100, depth=0.5
No base config loaded

Result: depth=0.5 (user), max_epoch=100 (user), width=1.0 (default fallback)
```
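The merge itself is plain dictionary layering. A minimal sketch of the priority rule, assuming unset form fields arrive as `None` (the real logic lives in `services/generate_yolox_exp.py` and is not shown here):

```python
# Minimal sketch of the parameter-priority rule, not the actual implementation.
DEFAULT_FALLBACKS = {'depth': 1.0, 'width': 1.0, 'max_epoch': 300}


def merge_params(user_form, base_config, transfer_learning):
    merged = dict(DEFAULT_FALLBACKS)            # 3. hardcoded fallbacks
    if transfer_learning == 'coco':
        merged.update(base_config)              # 2. protected base values
    for key, value in user_form.items():
        if value is not None:                   # 1. explicit user overrides win
            merged[key] = value
    return merged


# COCO example from above: depth=0.5 (user), width=0.50 (base), max_epoch=100 (user)
print(merge_params({'max_epoch': 100, 'depth': 0.5, 'width': None},
                   {'depth': 0.33, 'width': 0.50, 'max_epoch': 300}, 'coco'))
```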
## Debugging

To see which base config was loaded, check Flask logs:
```
Loaded base config for yolox-s: ['depth', 'width', 'activation', ...]
```

If base config fails to load:
```
Warning: Could not load base config for yolox-s: [error message]
Falling back to custom settings only
```
@@ -1 +1 @@
# Base experiment configurations for YOLOX models
@@ -1,79 +1,79 @@
#!/usr/bin/env python3
"""
Test script to demonstrate base configuration loading for YOLOX models
"""

import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from services.generate_yolox_exp import load_base_config


def test_base_configs():
    """Test loading all base configurations"""
    models = ['yolox-s', 'yolox-m', 'yolox-l', 'yolox-x']

    print("=" * 80)
    print("YOLOX Base Configuration Test")
    print("=" * 80)

    for model in models:
        print(f"\n{'='*80}")
        print(f"Model: {model.upper()}")
        print(f"{'='*80}")

        try:
            config = load_base_config(model)

            # Group parameters by category
            arch_params = ['depth', 'width', 'activation']
            training_params = ['max_epoch', 'warmup_epochs', 'basic_lr_per_img', 'scheduler',
                               'no_aug_epochs', 'min_lr_ratio']
            optimizer_params = ['momentum', 'weight_decay']
            augmentation_params = ['mosaic_prob', 'mixup_prob', 'hsv_prob', 'flip_prob',
                                   'degrees', 'translate', 'shear', 'mosaic_scale',
                                   'mixup_scale', 'enable_mixup']
            input_params = ['input_size', 'test_size', 'random_size']
            eval_params = ['eval_interval', 'print_interval']

            print("\n[Architecture]")
            for param in arch_params:
                if param in config:
                    print(f"  {param:25s} = {config[param]}")

            print("\n[Training Hyperparameters]")
            for param in training_params:
                if param in config:
                    print(f"  {param:25s} = {config[param]}")

            print("\n[Optimizer]")
            for param in optimizer_params:
                if param in config:
                    print(f"  {param:25s} = {config[param]}")

            print("\n[Data Augmentation]")
            for param in augmentation_params:
                if param in config:
                    print(f"  {param:25s} = {config[param]}")

            print("\n[Input/Output]")
            for param in input_params:
                if param in config:
                    print(f"  {param:25s} = {config[param]}")

            print("\n[Evaluation]")
            for param in eval_params:
                if param in config:
                    print(f"  {param:25s} = {config[param]}")

            print(f"\n✓ Successfully loaded {len(config)} parameters")

        except Exception as e:
            print(f"✗ Error loading config: {e}")

    print("\n" + "="*80)
    print("Test Complete")
    print("="*80)


if __name__ == '__main__':
    test_base_configs()
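`load_base_config` is imported from `services/generate_yolox_exp.py`, which is not included in this diff. A plausible sketch, assuming it maps a model name like `yolox-s` to `data/yolox_s.py` and collects the public attributes of that module's `BaseExp` class:

```python
# Hypothetical sketch of load_base_config; the real version in
# services/generate_yolox_exp.py is not shown in this diff.
import importlib


def load_base_config(model_name):
    """Return the BaseExp attributes of data/yolox_<size>.py as a dict."""
    module_name = 'data.' + model_name.replace('-', '_')  # 'yolox-s' -> 'data.yolox_s'
    module = importlib.import_module(module_name)
    base = module.BaseExp
    # Keep only public class attributes (drops __doc__, __module__, etc.)
    return {k: v for k, v in vars(base).items() if not k.startswith('_')}
```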
@@ -1,15 +1,15 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Base configuration for YOLOX-L model
# These parameters are preserved during transfer learning from COCO

class BaseExp:
    """Base experiment configuration for YOLOX-L"""

    # Model architecture (protected - always use these for yolox-l)
    depth = 1.0
    width = 1.0

    scheduler = "yoloxwarmcos"

    activation = "silu"
@@ -1,15 +1,15 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Base configuration for YOLOX-M model
# These parameters are preserved during transfer learning from COCO

class BaseExp:
    """Base experiment configuration for YOLOX-M"""

    # Model architecture (protected - always use these for yolox-m)
    depth = 0.67
    width = 0.75

    scheduler = "yoloxwarmcos"

    activation = "silu"
@@ -1,17 +1,17 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Base configuration for YOLOX-S model
# These parameters are preserved during transfer learning from COCO

class BaseExp:
    """Base experiment configuration for YOLOX-S"""

    # Model architecture (protected - always use these for yolox-s)
    depth = 0.33
    width = 0.50

    scheduler = "yoloxwarmcos"

    activation = "silu"
@@ -1,15 +1,15 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Base configuration for YOLOX-X model
# These parameters are preserved during transfer learning from COCO

class BaseExp:
    """Base experiment configuration for YOLOX-X"""

    # Model architecture (protected - always use these for yolox-x)
    depth = 1.33
    width = 1.25

    scheduler = "yoloxwarmcos"

    activation = "silu"
@@ -1,4 +1,4 @@
# Database module
from database.database import db

__all__ = ['db']
@@ -1,10 +1,10 @@
// database.js
const { Sequelize } = require('sequelize');

const sequelize = new Sequelize('myapp', 'root', 'root', {
  host: 'localhost',
  dialect: 'mysql',
  logging: false,
});

module.exports = sequelize;
@@ -1,9 +1,9 @@
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

def init_db(app):
    """Initialize database with app context"""
    db.init_app(app)
    with app.app_context():
        db.create_all()
@@ -1,12 +0,0 @@
-- Migration: Add width and height columns to image table
-- Date: 2025-11-27

USE myapp;

-- Add width and height columns to image table
ALTER TABLE `image`
  ADD COLUMN `width` FLOAT NULL AFTER `image_path`,
  ADD COLUMN `height` FLOAT NULL AFTER `width`;

-- Verify the changes
DESCRIBE `image`;
@@ -1,40 +1,40 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../database/database.js');


const Annotation = sequelize.define('Annotation', {
  annotation_id: {
    type: DataTypes.INTEGER,
    primaryKey: true,
    autoIncrement: true,
  },
  image_id: {
    type: DataTypes.INTEGER,
    allowNull: false,
  },
  x: {
    type: DataTypes.FLOAT,
    allowNull: false,
  },
  y: {
    type: DataTypes.FLOAT,
    allowNull: false,
  },
  height: {
    type: DataTypes.FLOAT,
    allowNull: false,
  },
  width: {
    type: DataTypes.FLOAT,
    allowNull: false,
  },
  Label: {
    type: DataTypes.STRING,
    allowNull: false,
  },
}, {
  tableName: 'annotation',
  timestamps: false,
});

module.exports = Annotation;
@@ -1,23 +1,23 @@
from database.database import db

class Annotation(db.Model):
    __tablename__ = 'annotation'

    annotation_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    image_id = db.Column(db.Integer, nullable=False)
    x = db.Column(db.Float, nullable=False)
    y = db.Column(db.Float, nullable=False)
    height = db.Column(db.Float, nullable=False)
    width = db.Column(db.Float, nullable=False)
    Label = db.Column(db.String(255), nullable=False)

    def to_dict(self):
        return {
            'annotation_id': self.annotation_id,
            'image_id': self.image_id,
            'x': self.x,
            'y': self.y,
            'height': self.height,
            'width': self.width,
            'Label': self.Label
        }

from database.database import db

class Annotation(db.Model):
    __tablename__ = 'annotation'

    annotation_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    image_id = db.Column(db.Integer, db.ForeignKey('image.image_id', ondelete='CASCADE'), nullable=False)
    x = db.Column(db.Float, nullable=False)
    y = db.Column(db.Float, nullable=False)
    height = db.Column(db.Float, nullable=False)
    width = db.Column(db.Float, nullable=False)
    Label = db.Column(db.String(255), nullable=False)

    def to_dict(self):
        return {
            'annotation_id': self.annotation_id,
            'image_id': self.image_id,
            'x': self.x,
            'y': self.y,
            'height': self.height,
            'width': self.width,
            'Label': self.Label
        }
21
backend/models/AnnotationProjectMapping.py
Normal file
@@ -0,0 +1,21 @@
from database.database import db

class AnnotationProjectMapping(db.Model):
    """Mapping between training project details and label studio projects (3NF)"""
    __tablename__ = 'annotation_project_mapping'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    project_details_id = db.Column(db.Integer, db.ForeignKey('training_project_details.id', ondelete='CASCADE'), nullable=False)
    label_studio_project_id = db.Column(db.Integer, db.ForeignKey('label_studio_project.project_id', ondelete='CASCADE'), nullable=False)

    # Unique constraint: each label studio project can only be mapped once per training project details
    __table_args__ = (
        db.UniqueConstraint('project_details_id', 'label_studio_project_id', name='uq_annotation_mapping'),
    )

    def to_dict(self):
        return {
            'id': self.id,
            'project_details_id': self.project_details_id,
            'label_studio_project_id': self.label_studio_project_id
        }
25
backend/models/ClassMapping.py
Normal file
@@ -0,0 +1,25 @@
from database.database import db

class ClassMapping(db.Model):
    """Class name mappings for training project details (3NF)"""
    __tablename__ = 'class_mapping'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    project_details_id = db.Column(db.Integer, db.ForeignKey('training_project_details.id', ondelete='CASCADE'), nullable=False)
    label_studio_project_id = db.Column(db.Integer, db.ForeignKey('label_studio_project.project_id', ondelete='CASCADE'), nullable=False)
    source_class = db.Column(db.String(255), nullable=False)
    target_class = db.Column(db.String(255), nullable=False)

    # Unique constraint: each source class can only be mapped once per project details AND label studio project
    __table_args__ = (
        db.UniqueConstraint('project_details_id', 'label_studio_project_id', 'source_class', name='uq_class_mapping'),
    )

    def to_dict(self):
        return {
            'id': self.id,
            'project_details_id': self.project_details_id,
            'label_studio_project_id': self.label_studio_project_id,
            'source_class': self.source_class,
            'target_class': self.target_class
        }
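A minimal migration sketch (not part of this commit) for exploding a legacy JSON class_map dict into ClassMapping rows; the {source: target} input shape and the session handling are assumptions:

from database.database import db
from models.ClassMapping import ClassMapping

def migrate_class_map(project_details_id, label_studio_project_id, legacy_map):
    # Explode a legacy {source: target} dict into one ClassMapping row per pair (hypothetical helper)
    for source, target in (legacy_map or {}).items():
        db.session.add(ClassMapping(
            project_details_id=project_details_id,
            label_studio_project_id=label_studio_project_id,
            source_class=source,
            target_class=target,
        ))
    db.session.commit()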
@@ -1,35 +1,35 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../database/database.js');

const Image = sequelize.define('Image', {
  image_id: {
    type: DataTypes.INTEGER,
    primaryKey: true,
    autoIncrement: true,
  },
  image_path: {
    type: DataTypes.STRING,
    allowNull: false,
  },
  project_id: {
    type: DataTypes.INTEGER,
    allowNull: false,
  },
  width: {
    type: DataTypes.FLOAT,
    allowNull: true,
  },
  height: {
    type: DataTypes.FLOAT,
    allowNull: true,
  },
}, {
  tableName: 'image',
  timestamps: false,
});

module.exports = Image;
@@ -1,19 +1,19 @@
from database.database import db

class Image(db.Model):
    __tablename__ = 'image'

    image_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    image_path = db.Column(db.String(500), nullable=False)
    project_id = db.Column(db.Integer, nullable=False)
    width = db.Column(db.Float)
    height = db.Column(db.Float)

    def to_dict(self):
        return {
            'image_id': self.image_id,
            'image_path': self.image_path,
            'project_id': self.project_id,
            'width': self.width,
            'height': self.height
        }

from database.database import db

class Image(db.Model):
    __tablename__ = 'image'

    image_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    image_path = db.Column(db.String(500), nullable=False)
    project_id = db.Column(db.Integer, db.ForeignKey('label_studio_project.project_id', ondelete='CASCADE'), nullable=False)
    width = db.Column(db.Integer)
    height = db.Column(db.Integer)

    def to_dict(self):
        return {
            'image_id': self.image_id,
            'image_path': self.image_path,
            'project_id': self.project_id,
            'width': self.width,
            'height': self.height
        }
@@ -1,24 +1,24 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../database/database.js');

const Label_studio_project = sequelize.define('LabelStudioProject', {
  project_id: {
    type: DataTypes.INTEGER,
    primaryKey: true,
    unique: true,
    allowNull: false,
  },
  title: {
    type: DataTypes.STRING,
    allowNull: false,
  }
}, {
  tableName: 'label_studio_project',
  timestamps: false,
});

module.exports = Label_studio_project;
@@ -1,13 +1,13 @@
from database.database import db

class LabelStudioProject(db.Model):
    __tablename__ = 'label_studio_project'

    project_id = db.Column(db.Integer, primary_key=True, unique=True)
    title = db.Column(db.String(255), nullable=False)

    def to_dict(self):
        return {
            'project_id': self.project_id,
            'title': self.title
        }
23
backend/models/ProjectClass.py
Normal file
@@ -0,0 +1,23 @@
from database.database import db

class ProjectClass(db.Model):
    """Class definitions for training projects (3NF)"""
    __tablename__ = 'project_class'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    project_id = db.Column(db.Integer, db.ForeignKey('training_project.project_id', ondelete='CASCADE'), nullable=False)
    class_name = db.Column(db.String(255), nullable=False)
    display_order = db.Column(db.Integer, default=0)

    # Unique constraint: one class name per project
    __table_args__ = (
        db.UniqueConstraint('project_id', 'class_name', name='uq_project_class'),
    )

    def to_dict(self):
        return {
            'id': self.id,
            'project_id': self.project_id,
            'class_name': self.class_name,
            'display_order': self.display_order
        }
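A short sketch (assumed helper, not in the commit) of how a project's ordered class list round-trips through ProjectClass rows, matching the display_order ordering that TrainingProject.to_dict() reads back:

from database.database import db
from models.ProjectClass import ProjectClass

def save_classes(project_id, class_names):
    # Replace a project's classes, preserving list order via display_order (hypothetical helper)
    ProjectClass.query.filter_by(project_id=project_id).delete()
    for order, name in enumerate(class_names):
        db.session.add(ProjectClass(project_id=project_id, class_name=name, display_order=order))
    db.session.commit()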
21
backend/models/Settings.py
Normal file
@@ -0,0 +1,21 @@
from database.database import db

class Settings(db.Model):
    __tablename__ = 'settings'

    id = db.Column(db.Integer, primary_key=True)
    key = db.Column(db.String(255), unique=True, nullable=False)
    value = db.Column(db.Text, nullable=True)
    description = db.Column(db.String(500), nullable=True)
    created_at = db.Column(db.DateTime, server_default=db.func.now())
    updated_at = db.Column(db.DateTime, server_default=db.func.now(), onupdate=db.func.now())

    def to_dict(self):
        return {
            'id': self.id,
            'key': self.key,
            'value': self.value,
            'description': self.description,
            'created_at': self.created_at.isoformat() if self.created_at else None,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None
        }
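A usage sketch for the new global settings table; get_setting and set_setting are hypothetical helpers, not part of the commit:

from database.database import db
from models.Settings import Settings

def get_setting(key, default=None):
    # Read one global setting by key (hypothetical helper)
    row = Settings.query.filter_by(key=key).first()
    return row.value if row else default

def set_setting(key, value, description=None):
    # Upsert a global setting; updated_at is maintained by the column's onupdate hook
    row = Settings.query.filter_by(key=key).first()
    if row is None:
        db.session.add(Settings(key=key, value=value, description=description))
    else:
        row.value = value
    db.session.commit()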
@@ -1,38 +1,38 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../database/database.js');

const Training_Project = sequelize.define('LabelStudioProject', {
  project_id: {
    type: DataTypes.INTEGER,
    primaryKey: true,
    unique: true,
    allowNull: false,
    autoIncrement: true,
  },
  title: {
    type: DataTypes.STRING,
    allowNull: false,
  },
  description: {
    type: DataTypes.STRING,
  },
  classes: {
    type: DataTypes.JSON,
    allowNull: false,
  },
  project_image: {
    type: DataTypes.BLOB,
  },
  project_image_type: {
    type: DataTypes.STRING,
    allowNull: true,
  }
}, {
  tableName: 'training_project',
  timestamps: false,
});

module.exports = Training_Project;
@@ -1,28 +1,36 @@
from database.database import db

class TrainingProject(db.Model):
    __tablename__ = 'training_project'

    project_id = db.Column(db.Integer, primary_key=True, unique=True, autoincrement=True)
    title = db.Column(db.String(255), nullable=False)
    description = db.Column(db.String(500))
    classes = db.Column(db.JSON, nullable=False)
    project_image = db.Column(db.LargeBinary)
    project_image_type = db.Column(db.String(100))

    def to_dict(self):
        result = {
            'project_id': self.project_id,
            'title': self.title,
            'description': self.description,
            'classes': self.classes,
            'project_image_type': self.project_image_type
        }
        if self.project_image:
            import base64
            base64_data = base64.b64encode(self.project_image).decode('utf-8')
            mime_type = self.project_image_type or 'image/png'
            result['project_image'] = f'data:{mime_type};base64,{base64_data}'
        else:
            result['project_image'] = None
        return result

from database.database import db

class TrainingProject(db.Model):
    __tablename__ = 'training_project'

    project_id = db.Column(db.Integer, primary_key=True, unique=True, autoincrement=True)
    title = db.Column(db.String(255), nullable=False)
    description = db.Column(db.String(500))
    project_image = db.Column(db.LargeBinary)
    project_image_type = db.Column(db.String(100))

    # Relationship to classes (3NF)
    classes_relation = db.relationship('ProjectClass', backref='project', lazy=True, cascade='all, delete-orphan')

    def to_dict(self, include_classes=True):
        result = {
            'project_id': self.project_id,
            'title': self.title,
            'description': self.description,
            'project_image_type': self.project_image_type
        }

        # Include classes as array for backwards compatibility
        if include_classes:
            from models.ProjectClass import ProjectClass
            classes = ProjectClass.query.filter_by(project_id=self.project_id).order_by(ProjectClass.display_order).all()
            result['classes'] = [c.class_name for c in classes]

        if self.project_image:
            import base64
            base64_data = base64.b64encode(self.project_image).decode('utf-8')
            mime_type = self.project_image_type or 'image/png'
            result['project_image'] = f'data:{mime_type};base64,{base64_data}'
        else:
            result['project_image'] = None
        return result
@@ -1,33 +1,33 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../database/database.js');

const TrainingProjectDetails = sequelize.define('TrainingProjectDetails', {
  id: {
    type: DataTypes.INTEGER,
    primaryKey: true,
    autoIncrement: true,
    unique: true,
  },
  project_id: {
    type: DataTypes.INTEGER,
    allowNull: false,
    unique: true,
  },
  annotation_projects: {
    type: DataTypes.JSON,
    allowNull: false,
  },
  class_map: {
    type: DataTypes.JSON,
    allowNull: true,
  },
  description: {
    type: DataTypes.JSON,
    allowNull: true,
  }
}, {
  tableName: 'training_project_details',
  timestamps: false,
});

module.exports = TrainingProjectDetails;
@@ -1,19 +1,35 @@
from database.database import db

class TrainingProjectDetails(db.Model):
    __tablename__ = 'training_project_details'

    id = db.Column(db.Integer, primary_key=True, unique=True, autoincrement=True)
    project_id = db.Column(db.Integer, nullable=False, unique=True)
    annotation_projects = db.Column(db.JSON, nullable=False)
    class_map = db.Column(db.JSON)
    description = db.Column(db.JSON)

    def to_dict(self):
        return {
            'id': self.id,
            'project_id': self.project_id,
            'annotation_projects': self.annotation_projects,
            'class_map': self.class_map,
            'description': self.description
        }

from database.database import db

class TrainingProjectDetails(db.Model):
    __tablename__ = 'training_project_details'

    id = db.Column(db.Integer, primary_key=True, unique=True, autoincrement=True)
    project_id = db.Column(db.Integer, db.ForeignKey('training_project.project_id', ondelete='CASCADE'), nullable=False, unique=True)
    description_text = db.Column(db.Text)  # Renamed from 'description' JSON to plain text

    # Relationships (3NF)
    annotation_mappings = db.relationship('AnnotationProjectMapping', backref='project_details', lazy=True, cascade='all, delete-orphan')
    class_mappings = db.relationship('ClassMapping', backref='project_details', lazy=True, cascade='all, delete-orphan')

    def to_dict(self, include_mappings=True):
        result = {
            'id': self.id,
            'project_id': self.project_id,
            'description': self.description_text
        }

        # Include mappings for backwards compatibility
        if include_mappings:
            from models.AnnotationProjectMapping import AnnotationProjectMapping
            from models.ClassMapping import ClassMapping

            # Get annotation projects as array
            mappings = AnnotationProjectMapping.query.filter_by(project_details_id=self.id).all()
            result['annotation_projects'] = [m.label_studio_project_id for m in mappings]

            # Get class map as dictionary (grouped by label_studio_project_id for backwards compatibility)
            # Return format: {source: target} (flattened across all projects)
            class_maps = ClassMapping.query.filter_by(project_details_id=self.id).all()
            result['class_map'] = {cm.source_class: cm.target_class for cm in class_maps}

        return result
25
backend/models/TrainingSize.py
Normal file
@@ -0,0 +1,25 @@
from database.database import db

class TrainingSize(db.Model):
    """Size configurations for training (3NF - replaces JSON arrays)"""
    __tablename__ = 'training_size'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    training_id = db.Column(db.Integer, db.ForeignKey('training.id', ondelete='CASCADE'), nullable=False)
    size_type = db.Column(db.String(50), nullable=False)  # 'input_size', 'test_size', 'mosaic_scale', 'mixup_scale'
    value_order = db.Column(db.Integer, nullable=False, default=0)  # Order in array (0=first, 1=second)
    value = db.Column(db.Float, nullable=False)

    # Composite key ensures proper ordering
    __table_args__ = (
        db.UniqueConstraint('training_id', 'size_type', 'value_order', name='uq_training_size'),
    )

    def to_dict(self):
        return {
            'id': self.id,
            'training_id': self.training_id,
            'size_type': self.size_type,
            'value_order': self.value_order,
            'value': self.value
        }
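To illustrate the row layout with example values: an input_size of (640, 640) for training id 1 becomes two ordered rows, ('input_size', 0, 640.0) and ('input_size', 1, 640.0), and the array is rebuilt by sorting on value_order, mirroring get_size_array in training.py below. A minimal sketch with a hypothetical helper:

from models.TrainingSize import TrainingSize

def load_size_array(training_id, size_type):
    # Rebuild an array such as input_size from its TrainingSize rows (hypothetical helper)
    rows = (TrainingSize.query
            .filter_by(training_id=training_id, size_type=size_type)
            .order_by(TrainingSize.value_order)
            .all())
    return [r.value for r in rows] or None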
@@ -1,16 +1,26 @@
# Import all models to ensure they are registered with SQLAlchemy
from models.TrainingProject import TrainingProject
from models.TrainingProjectDetails import TrainingProjectDetails
from models.training import Training
from models.LabelStudioProject import LabelStudioProject
from models.Images import Image
from models.Annotation import Annotation

__all__ = [
    'TrainingProject',
    'TrainingProjectDetails',
    'Training',
    'LabelStudioProject',
    'Image',
    'Annotation'
]

# Import all models to ensure they are registered with SQLAlchemy
from models.TrainingProject import TrainingProject
from models.TrainingProjectDetails import TrainingProjectDetails
from models.training import Training
from models.LabelStudioProject import LabelStudioProject
from models.Images import Image
from models.Annotation import Annotation
from models.Settings import Settings
from models.ProjectClass import ProjectClass
from models.AnnotationProjectMapping import AnnotationProjectMapping
from models.ClassMapping import ClassMapping
from models.TrainingSize import TrainingSize

__all__ = [
    'TrainingProject',
    'TrainingProjectDetails',
    'Training',
    'LabelStudioProject',
    'Image',
    'Annotation',
    'Settings',
    'ProjectClass',
    'AnnotationProjectMapping',
    'ClassMapping',
    'TrainingSize'
]
@@ -1,30 +1,30 @@
const LabelStudioProject = require('./LabelStudioProject.js');
const Annotation = require('./Annotation.js');
const Image = require('./Images.js');
const sequelize = require('../database/database.js');
const TrainingProjectDetails = require('./TrainingProjectDetails.js');
const TrainingProject = require('./TrainingProject.js');
const Training = require('./training.js');


const Project = LabelStudioProject;
const Img = Image;
const Ann = Annotation;

// Associations
Project.hasMany(Img, { foreignKey: 'project_id' });
Img.belongsTo(Project, { foreignKey: 'project_id' });

Img.hasMany(Ann, { foreignKey: 'image_id' });
Ann.belongsTo(Img, { foreignKey: 'image_id' });

// TrainingProjectDetails <-> TrainingProject
TrainingProjectDetails.belongsTo(TrainingProject, { foreignKey: 'project_id' });
TrainingProject.hasOne(TrainingProjectDetails, { foreignKey: 'project_id' });

// Training <-> TrainingProjectDetails
Training.belongsTo(TrainingProjectDetails, { foreignKey: 'project_details_id' });
TrainingProjectDetails.hasMany(Training, { foreignKey: 'project_details_id' });

module.exports = { Project, Img, Ann, TrainingProjectDetails, TrainingProject, Training };
@@ -1,140 +1,140 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../database/database.js');

const Training = sequelize.define('training', {
  id: {
    type: DataTypes.INTEGER,
    autoIncrement: true,
    unique: true,
    primaryKey: true
  },
  exp_name: {
    type: DataTypes.STRING(255)
  },
  max_epoch: {
    type: DataTypes.INTEGER
  },
  depth: {
    type: DataTypes.FLOAT
  },
  width: {
    type: DataTypes.FLOAT
  },
  activation: {
    type: DataTypes.STRING(255)
  },
  warmup_epochs: {
    type: DataTypes.INTEGER
  },
  warmup_lr: {
    type: DataTypes.FLOAT
  },
  basic_lr_per_img: {
    type: DataTypes.FLOAT
  },
  scheduler: {
    type: DataTypes.STRING(255)
  },
  no_aug_epochs: {
    type: DataTypes.INTEGER
  },
  min_lr_ratio: {
    type: DataTypes.FLOAT
  },
  ema: {
    type: DataTypes.BOOLEAN
  },
  weight_decay: {
    type: DataTypes.FLOAT
  },
  momentum: {
    type: DataTypes.FLOAT
  },
  input_size: {
    type: DataTypes.JSON
  },
  print_interval: {
    type: DataTypes.INTEGER
  },
  eval_interval: {
    type: DataTypes.INTEGER
  },
  save_history_ckpt: {
    type: DataTypes.BOOLEAN
  },
  test_size: {
    type: DataTypes.JSON
  },
  test_conf: {
    type: DataTypes.FLOAT
  },
  nms_thre: {
    type: DataTypes.FLOAT
  },
  multiscale_range: {
    type: DataTypes.INTEGER
  },
  enable_mixup: {
    type: DataTypes.BOOLEAN
  },
  mosaic_prob: {
    type: DataTypes.FLOAT
  },
  mixup_prob: {
    type: DataTypes.FLOAT
  },
  hsv_prob: {
    type: DataTypes.FLOAT
  },
  flip_prob: {
    type: DataTypes.FLOAT
  },
  degrees: {
    type: DataTypes.FLOAT
  },
  mosaic_scale: {
    type: DataTypes.JSON
  },
  mixup_scale: {
    type: DataTypes.JSON
  },
  translate: {
    type: DataTypes.FLOAT
  },
  shear: {
    type: DataTypes.FLOAT
  },
  training_name: {
    type: DataTypes.STRING(255)
  },
  project_details_id: {
    type: DataTypes.INTEGER,
    allowNull: false
  },
  seed: {
    type: DataTypes.INTEGER
  },
  train: {
    type: DataTypes.INTEGER
  },
  valid: {
    type: DataTypes.INTEGER
  },
  test: {
    type: DataTypes.INTEGER
  },
  selected_model: {
    type: DataTypes.STRING(255)
  },
  transfer_learning: {
    type: DataTypes.STRING(255)
  },
  model_upload: {
    type: DataTypes.BLOB
  }
}, {
  tableName: 'training',
  timestamps: false
});

module.exports = Training;
@@ -1,92 +1,109 @@
from database.database import db

class Training(db.Model):
    __tablename__ = 'training'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True, unique=True)
    exp_name = db.Column(db.String(255))
    max_epoch = db.Column(db.Integer)
    depth = db.Column(db.Float)
    width = db.Column(db.Float)
    activation = db.Column(db.String(255))
    warmup_epochs = db.Column(db.Integer)
    warmup_lr = db.Column(db.Float)
    basic_lr_per_img = db.Column(db.Float)
    scheduler = db.Column(db.String(255))
    no_aug_epochs = db.Column(db.Integer)
    min_lr_ratio = db.Column(db.Float)
    ema = db.Column(db.Boolean)
    weight_decay = db.Column(db.Float)
    momentum = db.Column(db.Float)
    input_size = db.Column(db.JSON)
    print_interval = db.Column(db.Integer)
    eval_interval = db.Column(db.Integer)
    save_history_ckpt = db.Column(db.Boolean)
    test_size = db.Column(db.JSON)
    test_conf = db.Column(db.Float)
    nms_thre = db.Column(db.Float)
    multiscale_range = db.Column(db.Integer)
    enable_mixup = db.Column(db.Boolean)
    mosaic_prob = db.Column(db.Float)
    mixup_prob = db.Column(db.Float)
    hsv_prob = db.Column(db.Float)
    flip_prob = db.Column(db.Float)
    degrees = db.Column(db.Float)
    mosaic_scale = db.Column(db.JSON)
    mixup_scale = db.Column(db.JSON)
    translate = db.Column(db.Float)
    shear = db.Column(db.Float)
    training_name = db.Column(db.String(255))
    project_details_id = db.Column(db.Integer, nullable=False)
    seed = db.Column(db.Integer)
    train = db.Column(db.Integer)
    valid = db.Column(db.Integer)
    test = db.Column(db.Integer)
    selected_model = db.Column(db.String(255))
    transfer_learning = db.Column(db.String(255))
    model_upload = db.Column(db.LargeBinary)

    def to_dict(self):
        return {
            'id': self.id,
            'exp_name': self.exp_name,
            'max_epoch': self.max_epoch,
            'depth': self.depth,
            'width': self.width,
            'activation': self.activation,
            'warmup_epochs': self.warmup_epochs,
            'warmup_lr': self.warmup_lr,
            'basic_lr_per_img': self.basic_lr_per_img,
            'scheduler': self.scheduler,
            'no_aug_epochs': self.no_aug_epochs,
            'min_lr_ratio': self.min_lr_ratio,
            'ema': self.ema,
            'weight_decay': self.weight_decay,
            'momentum': self.momentum,
            'input_size': self.input_size,
            'print_interval': self.print_interval,
            'eval_interval': self.eval_interval,
            'save_history_ckpt': self.save_history_ckpt,
            'test_size': self.test_size,
            'test_conf': self.test_conf,
            'nms_thre': self.nms_thre,
            'multiscale_range': self.multiscale_range,
            'enable_mixup': self.enable_mixup,
            'mosaic_prob': self.mosaic_prob,
            'mixup_prob': self.mixup_prob,
            'hsv_prob': self.hsv_prob,
            'flip_prob': self.flip_prob,
            'degrees': self.degrees,
            'mosaic_scale': self.mosaic_scale,
            'mixup_scale': self.mixup_scale,
            'translate': self.translate,
            'shear': self.shear,
            'training_name': self.training_name,
            'project_details_id': self.project_details_id,
            'seed': self.seed,
            'train': self.train,
            'valid': self.valid,
            'test': self.test,
            'selected_model': self.selected_model,
            'transfer_learning': self.transfer_learning
        }

from database.database import db

class Training(db.Model):
    __tablename__ = 'training'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True, unique=True)
    exp_name = db.Column(db.String(255))
    max_epoch = db.Column(db.Integer)
    depth = db.Column(db.Float)
    width = db.Column(db.Float)
    activation = db.Column(db.String(255))
    warmup_epochs = db.Column(db.Integer)
    warmup_lr = db.Column(db.Float)
    basic_lr_per_img = db.Column(db.Float)
    scheduler = db.Column(db.String(255))
    no_aug_epochs = db.Column(db.Integer)
    min_lr_ratio = db.Column(db.Float)
    ema = db.Column(db.Boolean)
    weight_decay = db.Column(db.Float)
    momentum = db.Column(db.Float)
    # input_size moved to TrainingSize table
    print_interval = db.Column(db.Integer)
    eval_interval = db.Column(db.Integer)
    save_history_ckpt = db.Column(db.Boolean)
    # test_size moved to TrainingSize table
    test_conf = db.Column(db.Float)
    nms_thre = db.Column(db.Float)
    multiscale_range = db.Column(db.Integer)
    enable_mixup = db.Column(db.Boolean)
    mosaic_prob = db.Column(db.Float)
    mixup_prob = db.Column(db.Float)
    hsv_prob = db.Column(db.Float)
    flip_prob = db.Column(db.Float)
    degrees = db.Column(db.Float)
    # mosaic_scale moved to TrainingSize table
    # mixup_scale moved to TrainingSize table
    translate = db.Column(db.Float)
    shear = db.Column(db.Float)
    training_name = db.Column(db.String(255))
    project_details_id = db.Column(db.Integer, db.ForeignKey('training_project_details.id', ondelete='CASCADE'), nullable=False)
    seed = db.Column(db.Integer)
    train = db.Column(db.Integer)
    valid = db.Column(db.Integer)
    test = db.Column(db.Integer)
    selected_model = db.Column(db.String(255))
    transfer_learning = db.Column(db.String(255))
    model_upload = db.Column(db.LargeBinary)

    # Relationship to size configurations (3NF)
    size_configs = db.relationship('TrainingSize', backref='training', lazy=True, cascade='all, delete-orphan')

    def to_dict(self, include_sizes=True):
        result = {
            'id': self.id,
            'exp_name': self.exp_name,
            'max_epoch': self.max_epoch,
            'depth': self.depth,
            'width': self.width,
            'activation': self.activation,
            'warmup_epochs': self.warmup_epochs,
            'warmup_lr': self.warmup_lr,
            'basic_lr_per_img': self.basic_lr_per_img,
            'scheduler': self.scheduler,
            'no_aug_epochs': self.no_aug_epochs,
            'min_lr_ratio': self.min_lr_ratio,
            'ema': self.ema,
            'weight_decay': self.weight_decay,
            'momentum': self.momentum,
            'print_interval': self.print_interval,
            'eval_interval': self.eval_interval,
            'save_history_ckpt': self.save_history_ckpt,
            'test_conf': self.test_conf,
            'nms_thre': self.nms_thre,
            'multiscale_range': self.multiscale_range,
            'enable_mixup': self.enable_mixup,
            'mosaic_prob': self.mosaic_prob,
            'mixup_prob': self.mixup_prob,
            'hsv_prob': self.hsv_prob,
            'flip_prob': self.flip_prob,
            'degrees': self.degrees,
            'translate': self.translate,
            'shear': self.shear,
            'training_name': self.training_name,
            'project_details_id': self.project_details_id,
            'seed': self.seed,
            'train': self.train,
            'valid': self.valid,
            'test': self.test,
            'selected_model': self.selected_model,
            'transfer_learning': self.transfer_learning
        }

        # Include size arrays for backwards compatibility
        if include_sizes:
            from models.TrainingSize import TrainingSize

            def get_size_array(size_type):
                sizes = TrainingSize.query.filter_by(
                    training_id=self.id,
                    size_type=size_type
                ).order_by(TrainingSize.value_order).all()
                return [s.value for s in sizes] if sizes else None

            result['input_size'] = get_size_array('input_size')
            result['test_size'] = get_size_array('test_size')
            result['mosaic_scale'] = get_size_array('mosaic_scale')
            result['mixup_scale'] = get_size_array('mixup_scale')

        return result
@@ -1,8 +1,8 @@
Flask==3.0.0
Flask-CORS==4.0.0
Flask-SQLAlchemy==3.1.1
SQLAlchemy==2.0.23
PyMySQL==1.1.0
python-dotenv==1.0.0
requests==2.31.0
Pillow==10.1.0
@@ -1 +1 @@
# Routes module
@@ -1,496 +1,496 @@
const express = require('express');
const multer = require('multer');
const upload = multer();
const TrainingProject = require('../models/TrainingProject.js');
const LabelStudioProject = require('../models/LabelStudioProject.js');
const { seedLabelStudio, updateStatus } = require('../services/seed-label-studio.js');
const fs = require('fs');
const path = require('path');
const { generateTrainingJson } = require('../services/generate-json-yolox.js');


const router = express.Router();

// Ensure JSON bodies are parsed for all routes
router.use(express.json());

router.get('/seed', async (req, res) => {
  const result = await seedLabelStudio();
  res.json(result);
});


// Trigger generate-json-yolox.js
router.post('/generate-yolox-json', async (req, res) => {
  const { project_id } = req.body;
  if (!project_id) {
    return res.status(400).json({ message: 'Missing project_id in request body' });
  }
  try {
    // Generate COCO JSONs
    // Find all TrainingProjectDetails for this project
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const detailsRows = await TrainingProjectDetails.findAll({ where: { project_id } });
    if (!detailsRows || detailsRows.length === 0) {
      return res.status(404).json({ message: 'No TrainingProjectDetails found for project ' + project_id });
    }
    // For each details row, generate coco.jsons and exp.py in projectfolder/project_details_id
    const Training = require('../models/training.js');
    const { saveYoloxExp } = require('../services/generate-yolox-exp.js');
    const TrainingProject = require('../models/TrainingProject.js');
    const trainingProject = await TrainingProject.findByPk(project_id);
    const projectName = trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${project_id}`;
    for (const details of detailsRows) {
      const detailsId = details.id;
      await generateTrainingJson(detailsId);
      const trainings = await Training.findAll({ where: { project_details_id: detailsId } });
      if (trainings.length === 0) continue;
      // For each training, save exp.py in projectfolder/project_details_id
      const outDir = path.join(__dirname, '..', projectName, String(detailsId));
      if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
      for (const training of trainings) {
        const expFilePath = path.join(outDir, 'exp.py');
        await saveYoloxExp(training.id, expFilePath);
      }
    }

    // Find all trainings for this project
    // ...existing code...
    res.json({ message: 'YOLOX JSON and exp.py generated for project ' + project_id });
  } catch (err) {
    console.error('Error generating YOLOX JSON:', err);
    res.status(500).json({ message: 'Failed to generate YOLOX JSON', error: err.message });
  }
});

// Start YOLOX training
const { spawn } = require('child_process');
router.post('/start-yolox-training', async (req, res) => {
  try {
    const { project_id, training_id } = req.body;
    // Get project name
    const trainingProject = await TrainingProject.findByPk(project_id);
    const projectName = trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${project_id}`;
    // Look up training row by id or project_details_id
    const Training = require('../models/training.js');
    let trainingRow = await Training.findByPk(training_id);
    if (!trainingRow) {
      trainingRow = await Training.findOne({ where: { project_details_id: training_id } });
    }
    if (!trainingRow) {
      return res.status(404).json({ error: `Training row not found for id or project_details_id ${training_id}` });
    }
    const project_details_id = trainingRow.project_details_id;
    // Use the generated exp.py from the correct project folder
    const outDir = path.join(__dirname, '..', projectName, String(project_details_id));
    const yoloxMainDir = '/home/kitraining/Yolox/YOLOX-main';
    const expSrc = path.join(outDir, 'exp.py');
    if (!fs.existsSync(expSrc)) {
      return res.status(500).json({ error: `exp.py not found at ${expSrc}` });
    }
    // Source venv and run YOLOX training in YOLOX-main folder
    const yoloxVenv = '/home/kitraining/Yolox/yolox_venv/bin/activate';
    // Determine model argument based on selected_model and transfer_learning
    let modelArg = '';
    let cmd = '';
    if (
      trainingRow.transfer_learning &&
      typeof trainingRow.transfer_learning === 'string' &&
      trainingRow.transfer_learning.toLowerCase() === 'coco'
    ) {
      // If transfer_learning is 'coco', add -o and modelArg
      modelArg = ` -c /home/kitraining/Yolox/YOLOX-main/pretrained/${trainingRow.selected_model}`;
      cmd = `bash -c 'source ${yoloxVenv} && python tools/train.py -f ${expSrc} -d 1 -b 8 --fp16 -o ${modelArg}.pth --cache'`;
    } else if (
      trainingRow.selected_model &&
      trainingRow.selected_model.toLowerCase() === 'coco' &&
      (!trainingRow.transfer_learning || trainingRow.transfer_learning === false)
    ) {
      // If selected_model is 'coco' and not transfer_learning, add modelArg only
      modelArg = ` -c /pretrained/${trainingRow.selected_model}`;
      cmd = `bash -c 'source ${yoloxVenv} && python tools/train.py -f ${expSrc} -d 1 -b 8 --fp16 -o ${modelArg}.pth --cache'`;
    } else {
      // Default: no modelArg; keep --cache inside the quoted bash command so it reaches train.py
      cmd = `bash -c 'source ${yoloxVenv} && python tools/train.py -f ${expSrc} -d 1 -b 8 --fp16 --cache'`;
    }
    console.log(cmd);
    const child = spawn(cmd, { shell: true, cwd: yoloxMainDir });
    child.stdout.pipe(process.stdout);
    child.stderr.pipe(process.stderr);

    res.json({ message: 'Training started' });
  } catch (err) {
    res.status(500).json({ error: 'Failed to start training', details: err.message });
  }
});

// Get YOLOX training log
router.get('/training-log', async (req, res) => {
  try {
    const { project_id, training_id } = req.query;
    const trainingProject = await TrainingProject.findByPk(project_id);
    const projectName = trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${project_id}`;
    const outDir = path.join(__dirname, '..', projectName, String(training_id));
    const logPath = path.join(outDir, 'training.log');
    if (!fs.existsSync(logPath)) {
      return res.status(404).json({ error: 'Log not found' });
    }
    const logData = fs.readFileSync(logPath, 'utf8');
    res.json({ log: logData });
  } catch (err) {
    res.status(500).json({ error: 'Failed to fetch log', details: err.message });
  }
});

router.post('/training-projects', upload.single('project_image'), async (req, res) => {
  try {
    const { title, description } = req.body;
    const classes = JSON.parse(req.body.classes);
    const project_image = req.file ? req.file.buffer : null;
    const project_image_type = req.file ? req.file.mimetype : null;
    await TrainingProject.create({
      title,
      description,
      classes,
      project_image,
      project_image_type
    });
    res.json({ message: 'Project created!' });
  } catch (error) {
    console.error('Error creating project:', error);
    res.status(500).json({ message: 'Failed to create project', error: error.message });
  }
});

router.get('/training-projects', async (req, res) => {
  try {
    const projects = await TrainingProject.findAll();
    // Convert BLOB to base64 data URL for each project
    const serialized = projects.map(project => {
      const plain = project.get({ plain: true });
      if (plain.project_image) {
        const base64 = Buffer.from(plain.project_image).toString('base64');
        const mimeType = plain.project_image_type || 'image/png';
        plain.project_image = `data:${mimeType};base64,${base64}`;
      }
      return plain;
    });
    res.json(serialized);
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch projects', error: error.message });
  }
});

router.get('/update-status', async (req, res) => {
  res.json(updateStatus);
});

router.get('/label-studio-projects', async (req, res) => {
  try {
    const LabelStudioProject = require('../models/LabelStudioProject.js');
    const Image = require('../models/Images.js');
    const Annotation = require('../models/Annotation.js');
    const labelStudioProjects = await LabelStudioProject.findAll();
    const projectsWithCounts = await Promise.all(labelStudioProjects.map(async project => {
      const plain = project.get({ plain: true });
      // Get all images for this project
      const images = await Image.findAll({ where: { project_id: plain.project_id } });
      let annotationCounts = {};
      if (images.length > 0) {
        const imageIds = images.map(img => img.image_id);
        // Get all annotations for these images
        const annotations = await Annotation.findAll({ where: { image_id: imageIds } });
        // Count by label
        for (const ann of annotations) {
          const label = ann.Label;
          annotationCounts[label] = (annotationCounts[label] || 0) + 1;
        }
      }
      plain.annotationCounts = annotationCounts;
      return plain;
    }));
    res.json(projectsWithCounts);
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch projects', error: error.message });
  }
});


// POST endpoint to create TrainingProjectDetails with all fields
router.post('/training-project-details', async (req, res) => {
  try {
    const {
      project_id,
      annotation_projects,
      class_map,
      description
    } = req.body;
    if (!project_id || !annotation_projects) {
      return res.status(400).json({ message: 'Missing required fields' });
    }
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const created = await TrainingProjectDetails.create({
      project_id,
      annotation_projects,
      class_map: class_map || null,
      description: description || null
    });
    res.json({ message: 'TrainingProjectDetails created', details: created });
  } catch (error) {
    res.status(500).json({ message: 'Failed to create TrainingProjectDetails', error: error.message });
  }
});

// GET endpoint to fetch all TrainingProjectDetails
router.get('/training-project-details', async (req, res) => {
  try {
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findAll();
    res.json(details);
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch TrainingProjectDetails', error: error.message });
  }
});

// PUT endpoint to update class_map and description in TrainingProjectDetails
router.put('/training-project-details', async (req, res) => {
  try {
    const { project_id, class_map, description } = req.body;
    if (!project_id || !class_map || !description) {
      return res.status(400).json({ message: 'Missing required fields' });
    }
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findOne({ where: { project_id } });
    if (!details) {
      return res.status(404).json({ message: 'TrainingProjectDetails not found' });
    }
    details.class_map = class_map;
    details.description = description;
    await details.save();
    res.json({ message: 'Class map and description updated', details });
  } catch (error) {
    res.status(500).json({ message: 'Failed to update class map or description', error: error.message });
  }
});

// POST endpoint to receive YOLOX settings and save to DB (handles multipart/form-data)
router.post('/yolox-settings', upload.any(), async (req, res) => {
  try {
    const settings = req.body;
    // Debug: Log all received fields and types
    console.log('--- YOLOX settings received ---');
    console.log('settings:', settings);
    if (req.files && req.files.length > 0) {
      console.log('Files received:', req.files.map(f => ({ fieldname: f.fieldname, originalname: f.originalname, size: f.size })));
    }
    // Declare requiredFields once
    const requiredFields = ['project_details_id', 'exp_name', 'max_epoch', 'depth', 'width', 'activation', 'train', 'valid', 'test', 'selected_model', 'transfer_learning'];
    // Log types of required fields
    requiredFields.forEach(field => {
      console.log(`Field '${field}': value='${settings[field]}', type='${typeof settings[field]}'`);
    });
    // Map select_model to selected_model if present
    if (settings && settings.select_model && !settings.selected_model) {
      settings.selected_model = settings.select_model;
      delete settings.select_model;
    }
    // Lookup project_details_id from project_id
    if (!settings.project_id || isNaN(Number(settings.project_id))) {
      throw new Error('Missing or invalid project_id in request. Cannot assign training to a project.');
    }
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    let details = await TrainingProjectDetails.findOne({ where: { project_id: settings.project_id } });
    if (!details) {
      details = await TrainingProjectDetails.create({
        project_id: settings.project_id,
        annotation_projects: [],
        class_map: null,
        description: null
      });
    }
    settings.project_details_id = details.id;
    // Map 'act' from frontend to 'activation' for DB
    if (settings.act !== undefined) {
      settings.activation = settings.act;
      delete settings.act;
    }
    // Type conversion for DB compatibility
    [
      'max_epoch', 'depth', 'width', 'warmup_epochs', 'warmup_lr', 'no_aug_epochs', 'min_lr_ratio', 'weight_decay', 'momentum', 'print_interval', 'eval_interval', 'test_conf', 'nmsthre', 'multiscale_range', 'degrees', 'translate', 'shear', 'train', 'valid', 'test'
    ].forEach(f => {
      if (settings[f] !== undefined) settings[f] = Number(settings[f]);
    });
    // Improved boolean conversion
    ['ema', 'enable_mixup', 'save_history_ckpt'].forEach(f => {
      if (settings[f] !== undefined) {
        if (typeof settings[f] === 'string') {
          settings[f] = settings[f].toLowerCase() === 'true';
        } else {
          settings[f] = Boolean(settings[f]);
        }
      }
    });
    // Improved array conversion
    ['mosaic_scale', 'mixup_scale', 'scale'].forEach(f => {
      if (settings[f] && typeof settings[f] === 'string') {
        settings[f] = settings[f]
          .split(',')
          .map(s => Number(s.trim()))
          .filter(n => !isNaN(n));
      }
    });
    // Trim all string fields
    Object.keys(settings).forEach(f => {
      if (typeof settings[f] === 'string') settings[f] = settings[f].trim();
    });
    // Set default for transfer_learning if missing
    if (settings.transfer_learning === undefined) settings.transfer_learning = false;
    // Convert empty string seed to null
    if ('seed' in settings && (settings.seed === '' || settings.seed === undefined)) {
      settings.seed = null;
    }
    // Validate required fields for training table
    for (const field of requiredFields) {
      if (settings[field] === undefined || settings[field] === null || settings[field] === '') {
        console.error('Missing required field:', field, 'Value:', settings[field]);
        throw new Error('Missing required field: ' + field);
      }
    }
    console.log('Received YOLOX settings:', settings);
    // Handle uploaded model file (ckpt_upload)
    if (req.files && req.files.length > 0) {
      const ckptFile = req.files.find(f => f.fieldname === 'ckpt_upload');
      if (ckptFile) {
        const uploadDir = path.join(__dirname, '..', 'uploads');
        if (!fs.existsSync(uploadDir)) fs.mkdirSync(uploadDir);
        const filename = ckptFile.originalname || `uploaded_model_${settings.project_id}.pth`;
        const filePath = path.join(uploadDir, filename);
        fs.writeFileSync(filePath, ckptFile.buffer);
        settings.model_upload = filePath;
      }
    }
    // Save settings to DB only (no file)
    const { pushYoloxExpToDb } = require('../services/push-yolox-exp.js');
    const training = await pushYoloxExpToDb(settings);
    res.json({ message: 'YOLOX settings saved to DB', training });
  } catch (error) {
    console.error('Error in /api/yolox-settings:', error.stack || error);
    res.status(500).json({ message: 'Failed to save YOLOX settings', error: error.message });
  }
});

// POST endpoint to receive binary model file and save to disk (not DB)
router.post('/yolox-settings/upload', async (req, res) => {
  try {
    const projectId = req.query.project_id;
    if (!projectId) return res.status(400).json({ message: 'Missing project_id in query' });
    // Save file to disk
    const uploadDir = path.join(__dirname, '..', 'uploads');
    if (!fs.existsSync(uploadDir)) fs.mkdirSync(uploadDir);
    const filename = req.headers['x-upload-filename'] || `uploaded_model_${projectId}.pth`;
    const filePath = path.join(uploadDir, filename);
    const chunks = [];
    req.on('data', chunk => chunks.push(chunk));
    req.on('end', async () => {
      const buffer = Buffer.concat(chunks);
      fs.writeFile(filePath, buffer, async err => {
        if (err) {
          console.error('Error saving file:', err);
          return res.status(500).json({ message: 'Failed to save model file', error: err.message });
        }
        // Update latest training row for this project with file path
        try {
          const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
          const Training = require('../models/training.js');
          // Find details row for this project
          const details = await TrainingProjectDetails.findOne({ where: { project_id: projectId } });
          if (!details) return res.status(404).json({ message: 'No TrainingProjectDetails found for project_id' });
// Find latest training for this details row
|
||||
const training = await Training.findOne({ where: { project_details_id: details.id }, order: [['createdAt', 'DESC']] });
|
||||
if (!training) return res.status(404).json({ message: 'No training found for project_id' });
|
||||
// Save file path to model_upload field
|
||||
training.model_upload = filePath;
|
||||
await training.save();
|
||||
res.json({ message: 'Model file uploaded and saved to disk', filename, trainingId: training.id });
|
||||
} catch (dbErr) {
|
||||
console.error('Error updating training with file path:', dbErr);
|
||||
res.status(500).json({ message: 'File saved but failed to update training row', error: dbErr.message });
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error in /api/yolox-settings/upload:', error.stack || error);
|
||||
res.status(500).json({ message: 'Failed to upload model file', error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// GET endpoint to fetch all trainings (optionally filtered by project_id)
|
||||
router.get('/trainings', async (req, res) => {
|
||||
try {
|
||||
const project_id = req.query.project_id;
|
||||
const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
|
||||
const Training = require('../models/training.js');
|
||||
if (project_id) {
|
||||
// Find all details rows for this project
|
||||
const detailsRows = await TrainingProjectDetails.findAll({ where: { project_id } });
|
||||
if (!detailsRows || detailsRows.length === 0) return res.json([]);
|
||||
// Get all trainings linked to any details row for this project
|
||||
const detailsIds = detailsRows.map(d => d.id);
|
||||
const trainings = await Training.findAll({ where: { project_details_id: detailsIds } });
|
||||
return res.json(trainings);
|
||||
} else {
|
||||
// Return all trainings if no project_id is specified
|
||||
const trainings = await Training.findAll();
|
||||
return res.json(trainings);
|
||||
}
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: 'Failed to fetch trainings', error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// DELETE endpoint to remove a training by id
|
||||
router.delete('/trainings/:id', async (req, res) => {
|
||||
try {
|
||||
const Training = require('../models/training.js');
|
||||
const id = req.params.id;
|
||||
const deleted = await Training.destroy({ where: { id } });
|
||||
if (deleted) {
|
||||
res.json({ message: 'Training deleted' });
|
||||
} else {
|
||||
res.status(404).json({ message: 'Training not found' });
|
||||
}
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: 'Failed to delete training', error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// DELETE endpoint to remove a training project and all related entries
|
||||
router.delete('/training-projects/:id', async (req, res) => {
|
||||
try {
|
||||
const projectId = req.params.id;
|
||||
const TrainingProject = require('../models/TrainingProject.js');
|
||||
const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
|
||||
const Training = require('../models/training.js');
|
||||
// Find details row(s) for this project
|
||||
const detailsRows = await TrainingProjectDetails.findAll({ where: { project_id: projectId } });
|
||||
const detailsIds = detailsRows.map(d => d.id);
|
||||
// Delete all trainings linked to these details
|
||||
if (detailsIds.length > 0) {
|
||||
await Training.destroy({ where: { project_details_id: detailsIds } });
|
||||
await TrainingProjectDetails.destroy({ where: { project_id: projectId } });
|
||||
}
|
||||
// Delete the project itself
|
||||
const deleted = await TrainingProject.destroy({ where: { project_id: projectId } });
|
||||
if (deleted) {
|
||||
res.json({ message: 'Training project and all related entries deleted' });
|
||||
} else {
|
||||
res.status(404).json({ message: 'Training project not found' });
|
||||
}
|
||||
} catch (error) {
|
||||
res.status(500).json({ message: 'Failed to delete training project', error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
const express = require('express');
const multer = require('multer');
const upload = multer();
const TrainingProject = require('../models/TrainingProject.js');
const LabelStudioProject = require('../models/LabelStudioProject.js');
const { seedLabelStudio, updateStatus } = require('../services/seed-label-studio.js');
const fs = require('fs');
const path = require('path');
const { generateTrainingJson } = require('../services/generate-json-yolox.js');

const router = express.Router();

// Ensure JSON bodies are parsed for all routes
router.use(express.json());

router.get('/seed', async (req, res) => {
  const result = await seedLabelStudio();
  res.json(result);
});

// Trigger generate-json-yolox.js
router.post('/generate-yolox-json', async (req, res) => {
  const { project_id } = req.body;
  if (!project_id) {
    return res.status(400).json({ message: 'Missing project_id in request body' });
  }
  try {
    // Find all TrainingProjectDetails for this project
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const detailsRows = await TrainingProjectDetails.findAll({ where: { project_id } });
    if (!detailsRows || detailsRows.length === 0) {
      return res.status(404).json({ message: 'No TrainingProjectDetails found for project ' + project_id });
    }
    // For each details row, generate COCO JSONs and exp.py in projectfolder/project_details_id
    const Training = require('../models/training.js');
    const { saveYoloxExp } = require('../services/generate-yolox-exp.js');
    const trainingProject = await TrainingProject.findByPk(project_id);
    const projectName = trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${project_id}`;
    for (const details of detailsRows) {
      const detailsId = details.id;
      await generateTrainingJson(detailsId);
      const trainings = await Training.findAll({ where: { project_details_id: detailsId } });
      if (trainings.length === 0) continue;
      // For each training, save exp.py in projectfolder/project_details_id
      const outDir = path.join(__dirname, '..', projectName, String(detailsId));
      if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
      for (const training of trainings) {
        const expFilePath = path.join(outDir, 'exp.py');
        await saveYoloxExp(training.id, expFilePath);
      }
    }
    res.json({ message: 'YOLOX JSON and exp.py generated for project ' + project_id });
  } catch (err) {
    console.error('Error generating YOLOX JSON:', err);
    res.status(500).json({ message: 'Failed to generate YOLOX JSON', error: err.message });
  }
});
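
/*
 * Usage sketch (an assumption for illustration, not part of the original
 * code): a client can trigger COCO JSON + exp.py generation like this. The
 * route and payload shape come from the handler above; the base URL is
 * hypothetical.
 *
 *   const res = await fetch('http://localhost:3000/api/generate-yolox-json', {
 *     method: 'POST',
 *     headers: { 'Content-Type': 'application/json' },
 *     body: JSON.stringify({ project_id: 1 })
 *   });
 *   console.log(await res.json()); // { message: 'YOLOX JSON and exp.py generated for project 1' }
 */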

// Start YOLOX training
const { spawn } = require('child_process');
router.post('/start-yolox-training', async (req, res) => {
  try {
    const { project_id, training_id } = req.body;
    // Get project name
    const trainingProject = await TrainingProject.findByPk(project_id);
    const projectName = trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${project_id}`;
    // Look up training row by id or project_details_id
    const Training = require('../models/training.js');
    let trainingRow = await Training.findByPk(training_id);
    if (!trainingRow) {
      trainingRow = await Training.findOne({ where: { project_details_id: training_id } });
    }
    if (!trainingRow) {
      return res.status(404).json({ error: `Training row not found for id or project_details_id ${training_id}` });
    }
    const project_details_id = trainingRow.project_details_id;
    // Use the generated exp.py from the correct project folder
    const outDir = path.join(__dirname, '..', projectName, String(project_details_id));
    const yoloxMainDir = '/home/kitraining/Yolox/YOLOX-main';
    const expSrc = path.join(outDir, 'exp.py');
    if (!fs.existsSync(expSrc)) {
      return res.status(500).json({ error: `exp.py not found at ${expSrc}` });
    }
    // Source the venv and run YOLOX training inside the YOLOX-main folder
    const yoloxVenv = '/home/kitraining/Yolox/yolox_venv/bin/activate';
    // Determine the checkpoint argument based on selected_model and transfer_learning
    let modelArg = '';
    let cmd = '';
    if (
      trainingRow.transfer_learning &&
      typeof trainingRow.transfer_learning === 'string' &&
      trainingRow.transfer_learning.toLowerCase() === 'coco'
    ) {
      // transfer_learning is 'coco': pass the pretrained checkpoint (-c) and -o
      modelArg = ` -c /home/kitraining/Yolox/YOLOX-main/pretrained/${trainingRow.selected_model}`;
      cmd = `bash -c 'source ${yoloxVenv} && python tools/train.py -f ${expSrc} -d 1 -b 8 --fp16 -o ${modelArg}.pth --cache'`;
    } else if (
      trainingRow.selected_model &&
      trainingRow.selected_model.toLowerCase() === 'coco' &&
      (!trainingRow.transfer_learning || trainingRow.transfer_learning === false)
    ) {
      // selected_model is 'coco' without transfer learning: add modelArg only
      modelArg = ` -c /pretrained/${trainingRow.selected_model}`;
      cmd = `bash -c 'source ${yoloxVenv} && python tools/train.py -f ${expSrc} -d 1 -b 8 --fp16 -o ${modelArg}.pth --cache'`;
    } else {
      // Default: no modelArg (--cache must stay inside the quoted bash -c string,
      // otherwise it is passed to bash itself rather than to train.py)
      cmd = `bash -c 'source ${yoloxVenv} && python tools/train.py -f ${expSrc} -d 1 -b 8 --fp16 --cache'`;
    }
    console.log(cmd);
    const child = spawn(cmd, { shell: true, cwd: yoloxMainDir });
    child.stdout.pipe(process.stdout);
    child.stderr.pipe(process.stderr);

    res.json({ message: 'Training started' });
  } catch (err) {
    res.status(500).json({ error: 'Failed to start training', details: err.message });
  }
});
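
/*
 * For reference, a sketch of what the branches above compose (illustrative,
 * not output captured from the original code): with selected_model =
 * 'yolox_s' and transfer_learning = 'coco', cmd becomes roughly
 *
 *   bash -c 'source /home/kitraining/Yolox/yolox_venv/bin/activate &&
 *            python tools/train.py -f <project>/<details_id>/exp.py -d 1 -b 8
 *            --fp16 -o -c /home/kitraining/Yolox/YOLOX-main/pretrained/yolox_s.pth --cache'
 *
 * Note that the response is sent immediately; the spawned process keeps
 * running in the background and its output only reaches the server console.
 */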

// Get YOLOX training log
router.get('/training-log', async (req, res) => {
  try {
    const { project_id, training_id } = req.query;
    const trainingProject = await TrainingProject.findByPk(project_id);
    const projectName = trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${project_id}`;
    const outDir = path.join(__dirname, '..', projectName, String(training_id));
    const logPath = path.join(outDir, 'training.log');
    if (!fs.existsSync(logPath)) {
      return res.status(404).json({ error: 'Log not found' });
    }
    const logData = fs.readFileSync(logPath, 'utf8');
    res.json({ log: logData });
  } catch (err) {
    res.status(500).json({ error: 'Failed to fetch log', details: err.message });
  }
});
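
/*
 * A minimal client-side polling sketch (an assumption, not in the original
 * code): fetch the log every few seconds while a training runs. project_id
 * and training_id mirror the query parameters the handler above expects.
 *
 *   setInterval(async () => {
 *     const res = await fetch('/api/training-log?project_id=1&training_id=6');
 *     if (res.ok) console.log((await res.json()).log);
 *   }, 5000);
 */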

router.post('/training-projects', upload.single('project_image'), async (req, res) => {
  try {
    const { title, description } = req.body;
    const classes = JSON.parse(req.body.classes);
    const project_image = req.file ? req.file.buffer : null;
    const project_image_type = req.file ? req.file.mimetype : null;
    await TrainingProject.create({
      title,
      description,
      classes,
      project_image,
      project_image_type
    });
    res.json({ message: 'Project created!' });
  } catch (error) {
    console.error('Error creating project:', error);
    res.status(500).json({ message: 'Failed to create project', error: error.message });
  }
});
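
/*
 * Client sketch for the multipart route above (assumed usage, not original
 * code): the field names match upload.single('project_image') and the
 * req.body fields the handler reads; the class names are hypothetical.
 *
 *   const form = new FormData();
 *   form.append('title', 'My project');
 *   form.append('description', 'Detect two classes');
 *   form.append('classes', JSON.stringify(['cat', 'dog']));
 *   form.append('project_image', fileInput.files[0]);
 *   await fetch('/api/training-projects', { method: 'POST', body: form });
 */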

router.get('/training-projects', async (req, res) => {
  try {
    const projects = await TrainingProject.findAll();
    // Convert BLOB to base64 data URL for each project
    const serialized = projects.map(project => {
      const plain = project.get({ plain: true });
      if (plain.project_image) {
        const base64 = Buffer.from(plain.project_image).toString('base64');
        const mimeType = plain.project_image_type || 'image/png';
        plain.project_image = `data:${mimeType};base64,${base64}`;
      }
      return plain;
    });
    res.json(serialized);
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch projects', error: error.message });
  }
});

router.get('/update-status', async (req, res) => {
  res.json(updateStatus);
});

router.get('/label-studio-projects', async (req, res) => {
  try {
    const Image = require('../models/Images.js');
    const Annotation = require('../models/Annotation.js');
    const labelStudioProjects = await LabelStudioProject.findAll();
    const projectsWithCounts = await Promise.all(labelStudioProjects.map(async project => {
      const plain = project.get({ plain: true });
      // Get all images for this project
      const images = await Image.findAll({ where: { project_id: plain.project_id } });
      let annotationCounts = {};
      if (images.length > 0) {
        const imageIds = images.map(img => img.image_id);
        // Get all annotations for these images
        const annotations = await Annotation.findAll({ where: { image_id: imageIds } });
        // Count by label
        for (const ann of annotations) {
          const label = ann.Label;
          annotationCounts[label] = (annotationCounts[label] || 0) + 1;
        }
      }
      plain.annotationCounts = annotationCounts;
      return plain;
    }));
    res.json(projectsWithCounts);
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch projects', error: error.message });
  }
});

// POST endpoint to create TrainingProjectDetails with all fields
router.post('/training-project-details', async (req, res) => {
  try {
    const {
      project_id,
      annotation_projects,
      class_map,
      description
    } = req.body;
    if (!project_id || !annotation_projects) {
      return res.status(400).json({ message: 'Missing required fields' });
    }
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const created = await TrainingProjectDetails.create({
      project_id,
      annotation_projects,
      class_map: class_map || null,
      description: description || null
    });
    res.json({ message: 'TrainingProjectDetails created', details: created });
  } catch (error) {
    res.status(500).json({ message: 'Failed to create TrainingProjectDetails', error: error.message });
  }
});

// GET endpoint to fetch all TrainingProjectDetails
router.get('/training-project-details', async (req, res) => {
  try {
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findAll();
    res.json(details);
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch TrainingProjectDetails', error: error.message });
  }
});

// PUT endpoint to update class_map and description in TrainingProjectDetails
router.put('/training-project-details', async (req, res) => {
  try {
    const { project_id, class_map, description } = req.body;
    if (!project_id || !class_map || !description) {
      return res.status(400).json({ message: 'Missing required fields' });
    }
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findOne({ where: { project_id } });
    if (!details) {
      return res.status(404).json({ message: 'TrainingProjectDetails not found' });
    }
    details.class_map = class_map;
    details.description = description;
    await details.save();
    res.json({ message: 'Class map and description updated', details });
  } catch (error) {
    res.status(500).json({ message: 'Failed to update class map or description', error: error.message });
  }
});

// POST endpoint to receive YOLOX settings and save to DB (handles multipart/form-data)
router.post('/yolox-settings', upload.any(), async (req, res) => {
  try {
    const settings = req.body;
    // Debug: log all received fields and types
    console.log('--- YOLOX settings received ---');
    console.log('settings:', settings);
    if (req.files && req.files.length > 0) {
      console.log('Files received:', req.files.map(f => ({ fieldname: f.fieldname, originalname: f.originalname, size: f.size })));
    }
    // Declare requiredFields once
    const requiredFields = ['project_details_id', 'exp_name', 'max_epoch', 'depth', 'width', 'activation', 'train', 'valid', 'test', 'selected_model', 'transfer_learning'];
    // Log types of required fields
    requiredFields.forEach(field => {
      console.log(`Field '${field}': value='${settings[field]}', type='${typeof settings[field]}'`);
    });
    // Map select_model to selected_model if present
    if (settings && settings.select_model && !settings.selected_model) {
      settings.selected_model = settings.select_model;
      delete settings.select_model;
    }
    // Look up project_details_id from project_id
    if (!settings.project_id || isNaN(Number(settings.project_id))) {
      throw new Error('Missing or invalid project_id in request. Cannot assign training to a project.');
    }
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    let details = await TrainingProjectDetails.findOne({ where: { project_id: settings.project_id } });
    if (!details) {
      details = await TrainingProjectDetails.create({
        project_id: settings.project_id,
        annotation_projects: [],
        class_map: null,
        description: null
      });
    }
    settings.project_details_id = details.id;
    // Map 'act' from the frontend to 'activation' for the DB
    if (settings.act !== undefined) {
      settings.activation = settings.act;
      delete settings.act;
    }
    // Type conversion for DB compatibility
    [
      'max_epoch', 'depth', 'width', 'warmup_epochs', 'warmup_lr', 'no_aug_epochs', 'min_lr_ratio', 'weight_decay', 'momentum', 'print_interval', 'eval_interval', 'test_conf', 'nmsthre', 'multiscale_range', 'degrees', 'translate', 'shear', 'train', 'valid', 'test'
    ].forEach(f => {
      if (settings[f] !== undefined) settings[f] = Number(settings[f]);
    });
    // Boolean conversion
    ['ema', 'enable_mixup', 'save_history_ckpt'].forEach(f => {
      if (settings[f] !== undefined) {
        if (typeof settings[f] === 'string') {
          settings[f] = settings[f].toLowerCase() === 'true';
        } else {
          settings[f] = Boolean(settings[f]);
        }
      }
    });
    // Array conversion ('0.1, 2.0' -> [0.1, 2.0])
    ['mosaic_scale', 'mixup_scale', 'scale'].forEach(f => {
      if (settings[f] && typeof settings[f] === 'string') {
        settings[f] = settings[f]
          .split(',')
          .map(s => Number(s.trim()))
          .filter(n => !isNaN(n));
      }
    });
    // Trim all string fields
    Object.keys(settings).forEach(f => {
      if (typeof settings[f] === 'string') settings[f] = settings[f].trim();
    });
    // Set default for transfer_learning if missing
    if (settings.transfer_learning === undefined) settings.transfer_learning = false;
    // Convert empty string seed to null
    if ('seed' in settings && (settings.seed === '' || settings.seed === undefined)) {
      settings.seed = null;
    }
    // Validate required fields for the training table
    for (const field of requiredFields) {
      if (settings[field] === undefined || settings[field] === null || settings[field] === '') {
        console.error('Missing required field:', field, 'Value:', settings[field]);
        throw new Error('Missing required field: ' + field);
      }
    }
    console.log('Received YOLOX settings:', settings);
    // Handle uploaded model file (ckpt_upload)
    if (req.files && req.files.length > 0) {
      const ckptFile = req.files.find(f => f.fieldname === 'ckpt_upload');
      if (ckptFile) {
        const uploadDir = path.join(__dirname, '..', 'uploads');
        if (!fs.existsSync(uploadDir)) fs.mkdirSync(uploadDir);
        const filename = ckptFile.originalname || `uploaded_model_${settings.project_id}.pth`;
        const filePath = path.join(uploadDir, filename);
        fs.writeFileSync(filePath, ckptFile.buffer);
        settings.model_upload = filePath;
      }
    }
    // Save settings to the DB only (no file)
    const { pushYoloxExpToDb } = require('../services/push-yolox-exp.js');
    const training = await pushYoloxExpToDb(settings);
    res.json({ message: 'YOLOX settings saved to DB', training });
  } catch (error) {
    console.error('Error in /api/yolox-settings:', error.stack || error);
    res.status(500).json({ message: 'Failed to save YOLOX settings', error: error.message });
  }
});
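
/*
 * Example multipart payload for the settings route above (a sketch: the field
 * names come from requiredFields and the conversion lists, the values are
 * hypothetical; project_details_id is resolved server-side from project_id):
 *
 *   const form = new FormData();
 *   form.append('project_id', '1');
 *   form.append('exp_name', 'exp');
 *   form.append('max_epoch', '300');
 *   form.append('depth', '0.33');
 *   form.append('width', '0.5');
 *   form.append('act', 'silu');              // mapped to 'activation'
 *   form.append('train', '85');
 *   form.append('valid', '10');
 *   form.append('test', '5');
 *   form.append('selected_model', 'yolox_s.pth');
 *   form.append('transfer_learning', 'coco');
 *   form.append('mosaic_scale', '0.1, 2.0'); // parsed into [0.1, 2.0]
 *   await fetch('/api/yolox-settings', { method: 'POST', body: form });
 */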

// POST endpoint to receive a binary model file and save it to disk (not the DB)
router.post('/yolox-settings/upload', async (req, res) => {
  try {
    const projectId = req.query.project_id;
    if (!projectId) return res.status(400).json({ message: 'Missing project_id in query' });
    // Save file to disk
    const uploadDir = path.join(__dirname, '..', 'uploads');
    if (!fs.existsSync(uploadDir)) fs.mkdirSync(uploadDir);
    const filename = req.headers['x-upload-filename'] || `uploaded_model_${projectId}.pth`;
    const filePath = path.join(uploadDir, filename);
    const chunks = [];
    req.on('data', chunk => chunks.push(chunk));
    req.on('end', async () => {
      const buffer = Buffer.concat(chunks);
      fs.writeFile(filePath, buffer, async err => {
        if (err) {
          console.error('Error saving file:', err);
          return res.status(500).json({ message: 'Failed to save model file', error: err.message });
        }
        // Update the latest training row for this project with the file path
        try {
          const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
          const Training = require('../models/training.js');
          // Find the details row for this project
          const details = await TrainingProjectDetails.findOne({ where: { project_id: projectId } });
          if (!details) return res.status(404).json({ message: 'No TrainingProjectDetails found for project_id' });
          // Find the latest training for this details row
          const training = await Training.findOne({ where: { project_details_id: details.id }, order: [['createdAt', 'DESC']] });
          if (!training) return res.status(404).json({ message: 'No training found for project_id' });
          // Save the file path to the model_upload field
          training.model_upload = filePath;
          await training.save();
          res.json({ message: 'Model file uploaded and saved to disk', filename, trainingId: training.id });
        } catch (dbErr) {
          console.error('Error updating training with file path:', dbErr);
          res.status(500).json({ message: 'File saved but failed to update training row', error: dbErr.message });
        }
      });
    });
  } catch (error) {
    console.error('Error in /api/yolox-settings/upload:', error.stack || error);
    res.status(500).json({ message: 'Failed to upload model file', error: error.message });
  }
});
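
/*
 * Matching client sketch (assumed usage, not part of the original code):
 * stream the checkpoint as a raw request body; the filename travels in the
 * x-upload-filename header read by the handler above.
 *
 *   await fetch('/api/yolox-settings/upload?project_id=1', {
 *     method: 'POST',
 *     headers: {
 *       'Content-Type': 'application/octet-stream',
 *       'x-upload-filename': 'my_model.pth'
 *     },
 *     body: fileInput.files[0]
 *   });
 */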

// GET endpoint to fetch all trainings (optionally filtered by project_id)
router.get('/trainings', async (req, res) => {
  try {
    const project_id = req.query.project_id;
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const Training = require('../models/training.js');
    if (project_id) {
      // Find all details rows for this project
      const detailsRows = await TrainingProjectDetails.findAll({ where: { project_id } });
      if (!detailsRows || detailsRows.length === 0) return res.json([]);
      // Get all trainings linked to any details row for this project
      const detailsIds = detailsRows.map(d => d.id);
      const trainings = await Training.findAll({ where: { project_details_id: detailsIds } });
      return res.json(trainings);
    } else {
      // Return all trainings if no project_id is specified
      const trainings = await Training.findAll();
      return res.json(trainings);
    }
  } catch (error) {
    res.status(500).json({ message: 'Failed to fetch trainings', error: error.message });
  }
});

// DELETE endpoint to remove a training by id
router.delete('/trainings/:id', async (req, res) => {
  try {
    const Training = require('../models/training.js');
    const id = req.params.id;
    const deleted = await Training.destroy({ where: { id } });
    if (deleted) {
      res.json({ message: 'Training deleted' });
    } else {
      res.status(404).json({ message: 'Training not found' });
    }
  } catch (error) {
    res.status(500).json({ message: 'Failed to delete training', error: error.message });
  }
});

// DELETE endpoint to remove a training project and all related entries
router.delete('/training-projects/:id', async (req, res) => {
  try {
    const projectId = req.params.id;
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const Training = require('../models/training.js');
    // Find details row(s) for this project
    const detailsRows = await TrainingProjectDetails.findAll({ where: { project_id: projectId } });
    const detailsIds = detailsRows.map(d => d.id);
    // Delete all trainings linked to these details
    if (detailsIds.length > 0) {
      await Training.destroy({ where: { project_details_id: detailsIds } });
      await TrainingProjectDetails.destroy({ where: { project_id: projectId } });
    }
    // Delete the project itself
    const deleted = await TrainingProject.destroy({ where: { project_id: projectId } });
    if (deleted) {
      res.json({ message: 'Training project and all related entries deleted' });
    } else {
      res.status(404).json({ message: 'Training project not found' });
    }
  } catch (error) {
    res.status(500).json({ message: 'Failed to delete training project', error: error.message });
  }
});

module.exports = router;
@@ -1,34 +1,34 @@

const express = require('express');
const cors = require('cors');
const path = require('path');
const sequelize = require('./database/database');

const app = express();
const port = 3000;

// Register CORS, body parsing, and static files before mounting the router,
// so the /api routes actually pass through these middlewares (Express applies
// middleware in registration order); express.json() was previously registered twice.
app.use(cors());
app.use(express.json());
app.use(express.static(path.join(__dirname, '..')));

const apiRouter = require('./routes/api.js');
app.use('/api', apiRouter);

// Initialize DB and start server
(async () => {
  try {
    await sequelize.authenticate();
    console.log('DB connection established.');
    await sequelize.sync(); // Only if you want Sequelize to ensure schema matches

    app.listen(port, '0.0.0.0', () =>
      console.log(`Server running at http://0.0.0.0:${port}`)
    );
  } catch (err) {
    console.error('Failed to start:', err);
  }
})();
@@ -1 +1 @@
# Services module
@@ -1,92 +1,92 @@

const API_URL = 'http://192.168.1.19:8080/api';
const API_TOKEN = 'c1cef980b7c73004f4ee880a42839313b863869f';

const fetch = require('node-fetch');

async function fetchLableStudioProject(projectid) {
  // 1. Trigger export
  const exportUrl = `${API_URL}/projects/${projectid}/export?exportType=JSON_MIN`;
  const headers = { Authorization: `Token ${API_TOKEN}` };
  let res = await fetch(exportUrl, { headers });
  if (!res.ok) {
    let errorText = await res.text().catch(() => '');
    console.error(`Failed to trigger export: ${res.status} ${res.statusText} - ${errorText}`);
    throw new Error(`Failed to trigger export: ${res.status} ${res.statusText}`);
  }
  let data = await res.json();
  // If data is an array, it's ready
  if (Array.isArray(data)) return data;
  // If not, poll for the export file
  let fileUrl = data.download_url || data.url || null;
  let tries = 0;
  while (!fileUrl && tries < 20) {
    await new Promise(r => setTimeout(r, 2000));
    res = await fetch(exportUrl, { headers });
    if (!res.ok) {
      let errorText = await res.text().catch(() => '');
      console.error(`Failed to poll export: ${res.status} ${res.statusText} - ${errorText}`);
      throw new Error(`Failed to poll export: ${res.status} ${res.statusText}`);
    }
    data = await res.json();
    fileUrl = data.download_url || data.url || null;
    tries++;
  }
  if (!fileUrl) throw new Error('Label Studio export did not become ready');
  // 2. Download the export file
  res = await fetch(fileUrl.startsWith('http') ? fileUrl : `${API_URL.replace('/api','')}${fileUrl}`, { headers });
  if (!res.ok) {
    let errorText = await res.text().catch(() => '');
    console.error(`Failed to download export: ${res.status} ${res.statusText} - ${errorText}`);
    throw new Error(`Failed to download export: ${res.status} ${res.statusText}`);
  }
  return await res.json();
}

async function fetchProjectIdsAndTitles() {
  try {
    const response = await fetch(`${API_URL}/projects/`, {
      headers: {
        'Authorization': `Token ${API_TOKEN}`,
        'Content-Type': 'application/json'
      }
    });

    if (!response.ok) {
      let errorText = await response.text().catch(() => '');
      console.error(`Failed to fetch projects: ${response.status} ${response.statusText} - ${errorText}`);
      throw new Error(`HTTP error! status: ${response.status}`);
    }

    const data = await response.json();

    if (!data.results || !Array.isArray(data.results)) {
      throw new Error('API response does not contain results array');
    }

    // Extract id and title from each project
    const projects = data.results.map(project => ({
      id: project.id,
      title: project.title
    }));
    console.log(projects);
    return projects;
  } catch (error) {
    console.error('Failed to fetch projects:', error);
    return [];
  }
}

module.exports = { fetchLableStudioProject, fetchProjectIdsAndTitles };

// fetchLableStudioProject(20)
// fetchProjectIdsAndTitles()
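
/*
 * Usage sketch (assumed, not part of the original module):
 *
 *   const { fetchLableStudioProject, fetchProjectIdsAndTitles } = require('./label-studio-api');
 *   const projects = await fetchProjectIdsAndTitles();     // [{ id, title }, ...]
 *   const annotations = await fetchLableStudioProject(20); // JSON_MIN export tasks
 *
 * The module path is hypothetical; require it from wherever this file lives.
 */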
@@ -1,85 +1,93 @@

import requests
import time

API_URL = 'http://192.168.1.19:8080/api'
API_TOKEN = 'c1cef980b7c73004f4ee880a42839313b863869f'

def fetch_label_studio_project(project_id):
    """Fetch Label Studio project annotations"""
    export_url = f'{API_URL}/projects/{project_id}/export?exportType=JSON_MIN'
    headers = {'Authorization': f'Token {API_TOKEN}'}

    # Trigger export
    res = requests.get(export_url, headers=headers)
    if not res.ok:
        error_text = res.text if res.text else ''
        print(f'Failed to trigger export: {res.status_code} {res.reason} - {error_text}')
        raise Exception(f'Failed to trigger export: {res.status_code} {res.reason}')

    data = res.json()

    # If data is an array, it's ready
    if isinstance(data, list):
        return data

    # If not, poll for the export file
    file_url = data.get('download_url') or data.get('url')
    tries = 0

    while not file_url and tries < 20:
        time.sleep(2)
        res = requests.get(export_url, headers=headers)
        if not res.ok:
            error_text = res.text if res.text else ''
            print(f'Failed to poll export: {res.status_code} {res.reason} - {error_text}')
            raise Exception(f'Failed to poll export: {res.status_code} {res.reason}')

        data = res.json()
        file_url = data.get('download_url') or data.get('url')
        tries += 1

    if not file_url:
        raise Exception('Label Studio export did not become ready')

    # Download the export file
    full_url = file_url if file_url.startswith('http') else f"{API_URL.replace('/api', '')}{file_url}"
    res = requests.get(full_url, headers=headers)
    if not res.ok:
        error_text = res.text if res.text else ''
        print(f'Failed to download export: {res.status_code} {res.reason} - {error_text}')
        raise Exception(f'Failed to download export: {res.status_code} {res.reason}')

    return res.json()

def fetch_project_ids_and_titles():
    """Fetch all Label Studio project IDs and titles"""
    try:
        response = requests.get(
            f'{API_URL}/projects/',
            headers={
                'Authorization': f'Token {API_TOKEN}',
                'Content-Type': 'application/json'
            }
        )

        if not response.ok:
            error_text = response.text if response.text else ''
            print(f'Failed to fetch projects: {response.status_code} {response.reason} - {error_text}')
            raise Exception(f'HTTP error! status: {response.status_code}')

        data = response.json()

        if 'results' not in data or not isinstance(data['results'], list):
            raise Exception('API response does not contain results array')

        # Extract id and title from each project
        projects = [
            {'id': project['id'], 'title': project['title']}
            for project in data['results']
        ]
        print(projects)
        return projects

    except Exception as error:
        print(f'Failed to fetch projects: {error}')
        return []

import requests
import time
from services.settings_service import get_setting

def get_api_credentials():
    """Get Label Studio API credentials from settings"""
    api_url = get_setting('labelstudio_api_url', 'http://192.168.1.19:8080/api')
    api_token = get_setting('labelstudio_api_token', 'c1cef980b7c73004f4ee880a42839313b863869f')
    return api_url, api_token
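
# A minimal sketch of the settings lookup this relies on (an assumption:
# services.settings_service.get_setting reads a key from the global-settings
# store and falls back to the supplied default when the key is absent):
#
#     def get_setting(key, default=None):
#         row = Setting.query.filter_by(key=key).first()  # hypothetical model
#         return row.value if row else default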

def fetch_label_studio_project(project_id):
    """Fetch Label Studio project annotations"""
    API_URL, API_TOKEN = get_api_credentials()

    export_url = f'{API_URL}/projects/{project_id}/export?exportType=JSON_MIN'
    headers = {'Authorization': f'Token {API_TOKEN}'}

    # Trigger export
    res = requests.get(export_url, headers=headers)
    if not res.ok:
        error_text = res.text if res.text else ''
        print(f'Failed to trigger export: {res.status_code} {res.reason} - {error_text}')
        raise Exception(f'Failed to trigger export: {res.status_code} {res.reason}')

    data = res.json()

    # If data is an array, it's ready
    if isinstance(data, list):
        return data

    # If not, poll for the export file
    file_url = data.get('download_url') or data.get('url')
    tries = 0

    while not file_url and tries < 20:
        time.sleep(2)
        res = requests.get(export_url, headers=headers)
        if not res.ok:
            error_text = res.text if res.text else ''
            print(f'Failed to poll export: {res.status_code} {res.reason} - {error_text}')
            raise Exception(f'Failed to poll export: {res.status_code} {res.reason}')

        data = res.json()
        file_url = data.get('download_url') or data.get('url')
        tries += 1

    if not file_url:
        raise Exception('Label Studio export did not become ready')

    # Download the export file
    full_url = file_url if file_url.startswith('http') else f"{API_URL.replace('/api', '')}{file_url}"
    res = requests.get(full_url, headers=headers)
    if not res.ok:
        error_text = res.text if res.text else ''
        print(f'Failed to download export: {res.status_code} {res.reason} - {error_text}')
        raise Exception(f'Failed to download export: {res.status_code} {res.reason}')

    return res.json()

def fetch_project_ids_and_titles():
    """Fetch all Label Studio project IDs and titles"""
    API_URL, API_TOKEN = get_api_credentials()

    try:
        response = requests.get(
            f'{API_URL}/projects/',
            headers={
                'Authorization': f'Token {API_TOKEN}',
                'Content-Type': 'application/json'
            }
        )

        if not response.ok:
            error_text = response.text if response.text else ''
            print(f'Failed to fetch projects: {response.status_code} {response.reason} - {error_text}')
            raise Exception(f'HTTP error! status: {response.status_code}')

        data = response.json()

        if 'results' not in data or not isinstance(data['results'], list):
            raise Exception('API response does not contain results array')

        # Extract id and title from each project
        projects = [
            {'id': project['id'], 'title': project['title']}
            for project in data['results']
        ]
        print(projects)
        return projects

    except Exception as error:
        print(f'Failed to fetch projects: {error}')
        return []
@@ -1,176 +1,176 @@

const TrainingProject = require('../models/TrainingProject.js');
const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
const LabelStudioProject = require('../models/LabelStudioProject.js');
const Annotation = require('../models/Annotation.js');
const Images = require('../models/Images.js');
const fs = require('fs');

async function generateTrainingJson(trainingId){
  // trainingId is now project_details_id
  const trainingProjectDetails = await TrainingProjectDetails.findByPk(trainingId);
  if (!trainingProjectDetails) throw new Error('No TrainingProjectDetails found for project_details_id ' + trainingId);
  const detailsObj = trainingProjectDetails.get({ plain: true });
  // Get parent project for name
  const trainingProject = await TrainingProject.findByPk(detailsObj.project_id);
  // Get split percentages (assume they are stored as train_percent, valid_percent, test_percent)
  const trainPercent = detailsObj.train_percent || 85;
  const validPercent = detailsObj.valid_percent || 10;
  const testPercent = detailsObj.test_percent || 5;

  let cocoImages = [];
  let cocoAnnotations = [];
  let cocoCategories = [];
  let categoryMap = {};
  let categoryId = 0;
  let imageid = 0;
  let annotationid = 0;

  for (const cls of detailsObj.class_map) {
    const asgMap = [];
    const listAsg = cls[1];
    for (const asg of listAsg) {
      asgMap.push({ original: asg[0], mapped: asg[1] });
      // Build category list and mapping
      if (asg[1] && !(asg[1] in categoryMap)) {
        categoryMap[asg[1]] = categoryId;
        cocoCategories.push({ id: categoryId, name: asg[1], supercategory: '' });
        categoryId++;
      }
    }
    const images = await Images.findAll({ where: { project_id: cls[0] } });
    for (const image of images) {
      imageid += 1;
      let fileName = image.image_path;
      if (fileName.includes('%20')) {
        fileName = fileName.replace(/%20/g, ' ');
      }
      if (fileName && fileName.startsWith('/data/local-files/?d=')) {
        fileName = fileName.replace('/data/local-files/?d=', '');
        fileName = fileName.replace('/home/kitraining/home/kitraining/', '');
      }
      if (fileName && fileName.startsWith('home/kitraining/To_Annotate/')) {
        fileName = fileName.replace('home/kitraining/To_Annotate/', '');
      }
      // Get annotations for this image
      const annotations = await Annotation.findAll({ where: { image_id: image.image_id } });
      // Use image.width and image.height from DB (populated from original_width/original_height)
      cocoImages.push({
        id: imageid,
        file_name: fileName,
        width: image.width || 0,
        height: image.height || 0
      });
      for (const annotation of annotations) {
        // Translate class name using asgMap
        let mappedClass = annotation.Label;
        for (const mapEntry of asgMap) {
          if (annotation.Label === mapEntry.original) {
            mappedClass = mapEntry.mapped;
            break;
          }
        }
        // Only add annotation if mappedClass is valid
        if (mappedClass && mappedClass in categoryMap) {
          annotationid += 1;
          let area = 0;
          if (annotation.width && annotation.height) {
            area = annotation.width * annotation.height;
          }
          cocoAnnotations.push({
            id: annotationid,
            image_id: imageid,
            category_id: categoryMap[mappedClass],
            bbox: [annotation.x, annotation.y, annotation.width, annotation.height],
            area: area,
            iscrowd: annotation.iscrowd || 0
          });
        }
      }
    }
  }

  // Shuffle images for a random split using a seed
  function seededRandom(seed) {
    let x = Math.sin(seed++) * 10000;
    return x - Math.floor(x);
  }
  function shuffle(array, seed) {
    for (let i = array.length - 1; i > 0; i--) {
      const j = Math.floor(seededRandom(seed + i) * (i + 1));
      [array[i], array[j]] = [array[j], array[i]];
    }
  }
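
  /*
   * The seeded Fisher-Yates-style shuffle above makes the train/valid/test
   * split deterministic: seededRandom depends only on seed + i, so the same
   * seed always yields the same permutation and regenerating the JSONs keeps
   * each image in the same split. A quick check (illustrative, not part of
   * the original file):
   *
   *   const a = [1, 2, 3, 4, 5];
   *   const b = [1, 2, 3, 4, 5];
   *   shuffle(a, 42);
   *   shuffle(b, 42);
   *   // a and b now hold the identical permutation
   */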
  // Use seed from detailsObj if present, else default to 42
  const splitSeed = detailsObj.seed !== undefined && detailsObj.seed !== null ? Number(detailsObj.seed) : 42;
  shuffle(cocoImages, splitSeed);

  // Split images
  const totalImages = cocoImages.length;
  const trainCount = Math.floor(totalImages * trainPercent / 100);
  const validCount = Math.floor(totalImages * validPercent / 100);
  const testCount = totalImages - trainCount - validCount;

  const trainImages = cocoImages.slice(0, trainCount);
  const validImages = cocoImages.slice(trainCount, trainCount + validCount);
  const testImages = cocoImages.slice(trainCount + validCount);

  // Helper to get image ids for each split
  const trainImageIds = new Set(trainImages.map(img => img.id));
  const validImageIds = new Set(validImages.map(img => img.id));
  const testImageIds = new Set(testImages.map(img => img.id));

  // Split annotations
  const trainAnnotations = cocoAnnotations.filter(ann => trainImageIds.has(ann.image_id));
  const validAnnotations = cocoAnnotations.filter(ann => validImageIds.has(ann.image_id));
  const testAnnotations = cocoAnnotations.filter(ann => testImageIds.has(ann.image_id));

  // Build the final COCO JSONs
  const buildCocoJson = (images, annotations, categories) => ({
    images,
    annotations,
    categories
  });

  const trainJson = buildCocoJson(trainImages, trainAnnotations, cocoCategories);
  const validJson = buildCocoJson(validImages, validAnnotations, cocoCategories);
  const testJson = buildCocoJson(testImages, testAnnotations, cocoCategories);

  // Create the output directory for the annotation files
  const projectName = trainingProject && trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${detailsObj.project_id}`;
  const annotationsDir = `/home/kitraining/To_Annotate/annotations`;
  if (!fs.existsSync(annotationsDir)) {
    fs.mkdirSync(annotationsDir, { recursive: true });
  }

  // Write the splits to files in the annotations directory
  const trainPath = `${annotationsDir}/coco_project_${trainingId}_train.json`;
  const validPath = `${annotationsDir}/coco_project_${trainingId}_valid.json`;
  const testPath = `${annotationsDir}/coco_project_${trainingId}_test.json`;
  fs.writeFileSync(trainPath, JSON.stringify(trainJson, null, 2));
  fs.writeFileSync(validPath, JSON.stringify(validJson, null, 2));
  fs.writeFileSync(testPath, JSON.stringify(testJson, null, 2));
  console.log(`COCO JSON splits written to ${annotationsDir} for trainingId ${trainingId}`);

  // Also generate the inference exp.py in the project folder in the workspace.
  // Note: this is fire-and-forget; the promise is not awaited, so the file may
  // be written after generateTrainingJson() has already returned.
  const { generateYoloxInferenceExp } = require('./generate-yolox-exp');
  const path = require('path');
  const projectFolder = path.join(__dirname, '..', projectName, String(trainingId));
  if (!fs.existsSync(projectFolder)) {
    fs.mkdirSync(projectFolder, { recursive: true });
  }
  const inferenceExpPath = path.join(projectFolder, 'exp_infer.py');
  generateYoloxInferenceExp(trainingId).then(expContent => {
    fs.writeFileSync(inferenceExpPath, expContent);
    console.log(`Inference exp.py written to ${inferenceExpPath}`);
  }).catch(err => {
    console.error('Failed to generate inference exp.py:', err);
  });
}
|
||||
|
||||
|
||||
const TrainingProject = require('../models/TrainingProject.js');
|
||||
const TrainingProjectDetails = require('../models/TrainingProjectDetails.js')
|
||||
const LabelStudioProject = require('../models/LabelStudioProject.js')
|
||||
const Annotation = require('../models/Annotation.js')
|
||||
const Images = require('../models/Images.js')
|
||||
const fs = require('fs');
|
||||
|
||||
|
||||
async function generateTrainingJson(trainingId){
|
||||
// trainingId is now project_details_id
|
||||
const trainingProjectDetails = await TrainingProjectDetails.findByPk(trainingId);
|
||||
if (!trainingProjectDetails) throw new Error('No TrainingProjectDetails found for project_details_id ' + trainingId);
|
||||
const detailsObj = trainingProjectDetails.get({ plain: true });
|
||||
// Get parent project for name
|
||||
const trainingProject = await TrainingProject.findByPk(detailsObj.project_id);
|
||||
// Get split percentages (assume they are stored as train_percent, valid_percent, test_percent)
|
||||
const trainPercent = detailsObj.train_percent || 85;
|
||||
const validPercent = detailsObj.valid_percent || 10;
|
||||
const testPercent = detailsObj.test_percent || 5;
|
||||
|
||||
let cocoImages = [];
|
||||
let cocoAnnotations = [];
|
||||
let cocoCategories = [];
|
||||
let categoryMap = {};
|
||||
let categoryId = 0;
|
||||
let imageid = 0;
|
||||
let annotationid = 0;
|
||||
|
||||
for (const cls of detailsObj.class_map) {
|
||||
const asgMap = [];
|
||||
const listAsg = cls[1];
|
||||
for(const asg of listAsg){
|
||||
asgMap.push ({ original: asg[0], mapped: asg[1] });
|
||||
// Build category list and mapping
|
||||
if (asg[1] && !(asg[1] in categoryMap)) {
|
||||
categoryMap[asg[1]] = categoryId;
|
||||
cocoCategories.push({ id: categoryId, name: asg[1], supercategory: '' });
|
||||
categoryId++;
|
||||
}
|
||||
}
|
||||
const images = await Images.findAll({ where: { project_id: cls[0] } });
|
||||
for(const image of images){
|
||||
imageid += 1;
|
||||
let fileName = image.image_path;
|
||||
if (fileName.includes('%20')) {
|
||||
fileName = fileName.replace(/%20/g, ' ');
|
||||
}
|
||||
if (fileName && fileName.startsWith('/data/local-files/?d=')) {
|
||||
fileName = fileName.replace('/data/local-files/?d=', '');
|
||||
fileName = fileName.replace('/home/kitraining/home/kitraining/', '');
|
||||
}
|
||||
if (fileName && fileName.startsWith('home/kitraining/To_Annotate/')) {
|
||||
fileName = fileName.replace('home/kitraining/To_Annotate/','');
|
||||
}
            // Get annotations for this image
            const annotations = await Annotation.findAll({ where: { image_id: image.image_id } });
            // Use image.width and image.height from DB (populated from original_width/original_height)
            cocoImages.push({
                id: imageid,
                file_name: fileName,
                width: image.width || 0,
                height: image.height || 0
            });
            for (const annotation of annotations) {
                // Translate class name using asgMap
                let mappedClass = annotation.Label;
                for (const mapEntry of asgMap) {
                    if (annotation.Label === mapEntry.original) {
                        mappedClass = mapEntry.mapped;
                        break;
                    }
                }
                // Only add annotation if mappedClass is valid
                if (mappedClass && mappedClass in categoryMap) {
                    annotationid += 1;
                    let area = 0;
                    if (annotation.width && annotation.height) {
                        area = annotation.width * annotation.height;
                    }
                    cocoAnnotations.push({
                        id: annotationid,
                        image_id: imageid,
                        category_id: categoryMap[mappedClass],
                        bbox: [annotation.x, annotation.y, annotation.width, annotation.height],
                        area: area,
                        iscrowd: annotation.iscrowd || 0
                    });
                }
            }
        }
    }

    // Shuffle images for random split using seed
    function seededRandom(seed) {
        let x = Math.sin(seed++) * 10000;
        return x - Math.floor(x);
    }
    function shuffle(array, seed) {
        for (let i = array.length - 1; i > 0; i--) {
            const j = Math.floor(seededRandom(seed + i) * (i + 1));
            [array[i], array[j]] = [array[j], array[i]];
        }
    }
    // Use seed from detailsObj if present, else default to 42
    const splitSeed = detailsObj.seed !== undefined && detailsObj.seed !== null ? Number(detailsObj.seed) : 42;
    shuffle(cocoImages, splitSeed);

    // Split images
    const totalImages = cocoImages.length;
    const trainCount = Math.floor(totalImages * trainPercent / 100);
    const validCount = Math.floor(totalImages * validPercent / 100);
    const testCount = totalImages - trainCount - validCount;

    const trainImages = cocoImages.slice(0, trainCount);
    const validImages = cocoImages.slice(trainCount, trainCount + validCount);
    const testImages = cocoImages.slice(trainCount + validCount);

    // Helper to get image ids for each split
    const trainImageIds = new Set(trainImages.map(img => img.id));
    const validImageIds = new Set(validImages.map(img => img.id));
    const testImageIds = new Set(testImages.map(img => img.id));

    // Split annotations
    const trainAnnotations = cocoAnnotations.filter(ann => trainImageIds.has(ann.image_id));
    const validAnnotations = cocoAnnotations.filter(ann => validImageIds.has(ann.image_id));
    const testAnnotations = cocoAnnotations.filter(ann => testImageIds.has(ann.image_id));

    // Build final COCO JSONs
    const buildCocoJson = (images, annotations, categories) => ({
        images,
        annotations,
        categories
    });

    // Assemble the COCO JSON for each split
    const trainJson = buildCocoJson(trainImages, trainAnnotations, cocoCategories);
    const validJson = buildCocoJson(validImages, validAnnotations, cocoCategories);
    const testJson = buildCocoJson(testImages, testAnnotations, cocoCategories);

    // Create output directory: projectname/trainingid/annotations
    const projectName = trainingProject && trainingProject.name ? trainingProject.name.replace(/\s+/g, '_') : `project_${detailsObj.project_id}`;
    const outDir = `${projectName}/${trainingId}`;
    const annotationsDir = `/home/kitraining/To_Annotate/annotations`;
    if (!fs.existsSync(annotationsDir)) {
        fs.mkdirSync(annotationsDir, { recursive: true });
    }

    // Write to files in the annotations directory
    const trainPath = `${annotationsDir}/coco_project_${trainingId}_train.json`;
    const validPath = `${annotationsDir}/coco_project_${trainingId}_valid.json`;
    const testPath = `${annotationsDir}/coco_project_${trainingId}_test.json`;
    fs.writeFileSync(trainPath, JSON.stringify(trainJson, null, 2));
    fs.writeFileSync(validPath, JSON.stringify(validJson, null, 2));
    fs.writeFileSync(testPath, JSON.stringify(testJson, null, 2));
    console.log(`COCO JSON splits written to ${annotationsDir} for trainingId ${trainingId}`);

    // Also generate inference exp.py in the same output directory as exp.py (project folder in workspace)
    const { generateYoloxInferenceExp } = require('./generate-yolox-exp');
    const path = require('path');
    const projectFolder = path.join(__dirname, '..', projectName, String(trainingId));
    if (!fs.existsSync(projectFolder)) {
        fs.mkdirSync(projectFolder, { recursive: true });
    }
    const inferenceExpPath = path.join(projectFolder, 'exp_infer.py');
    generateYoloxInferenceExp(trainingId).then(expContent => {
        fs.writeFileSync(inferenceExpPath, expContent);
        console.log(`Inference exp.py written to ${inferenceExpPath}`);
    }).catch(err => {
        console.error('Failed to generate inference exp.py:', err);
    });
}

module.exports = { generateTrainingJson };
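
// Minimal usage sketch (illustrative; the require path and the id value 12 are
// assumptions, not taken from the real project layout):
//   const { generateTrainingJson } = require('./generate-training-json');
//   generateTrainingJson(12).catch(console.error);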
@@ -1,135 +1,135 @@
const fs = require('fs');
const path = require('path');
const Training = require('../models/training.js');
const TrainingProject = require('../models/TrainingProject.js');

// Remove Python comments and legacy code
const exp_names = [
    'YOLOX-s',
    'YOLOX-m',
    'YOLOX-l',
    'YOLOX-x',
    'YOLOX-Darknet53', //todo
    'YOLOX-Nano',
    'YOLOX-Tiny'
]

//TODO: Clean up generation of exp_names.py and remove second exp creation!!!


// Refactored: Accept trainingId, fetch info from DB
async function generateYoloxExp(trainingId) {
    // Fetch training row from DB by project_details_id if not found by PK
    let training = await Training.findByPk(trainingId);
    if (!training) {
        training = await Training.findOne({ where: { project_details_id: trainingId } });
    }
    if (!training) throw new Error('Training not found for trainingId or project_details_id: ' + trainingId);

    // If transfer_learning is 'coco', just return the path to the default exp.py
    if (training.transfer_learning === 'coco') {
        const selectedModel = training.selected_model.toLowerCase().replace('-', '_');
        const expSourcePath = `/home/kitraining/Yolox/YOLOX-main/exps/default/${selectedModel}.py`;
        if (!fs.existsSync(expSourcePath)) {
            throw new Error(`Default exp.py not found for model: ${selectedModel} at ${expSourcePath}`);
        }
        // Copy to project folder (e.g., /home/kitraining/coco_tool/backend/project_XX/YY/exp.py)
        const projectDetailsId = training.project_details_id;
        const projectFolder = path.resolve(__dirname, `../project_23/${projectDetailsId}`);
        if (!fs.existsSync(projectFolder)) {
            fs.mkdirSync(projectFolder, { recursive: true });
        }
        const expDestPath = path.join(projectFolder, 'exp.py');
        fs.copyFileSync(expSourcePath, expDestPath);
        return { type: 'default', expPath: expDestPath };
    }

    // If transfer_learning is 'sketch', generate a custom exp.py as before
    if (training.transfer_learning === 'sketch') {
        // ...existing custom exp.py generation logic here (copy from previous implementation)...
        // For brevity, you can call generateYoloxInferenceExp or similar here, or inline the logic.
        // Example:
        const expContent = await generateYoloxInferenceExp(trainingId);
        return { type: 'custom', expContent };
    }

    throw new Error('Unknown transfer_learning type: ' + training.transfer_learning);
}

async function saveYoloxExp(trainingId, outPath) {
    const expResult = await generateYoloxExp(trainingId);
    if (expResult.type === 'custom' && expResult.expContent) {
        fs.writeFileSync(outPath, expResult.expContent);
        return outPath;
    } else if (expResult.type === 'default' && expResult.expPath) {
        // Optionally copy the file if outPath is different
        if (expResult.expPath !== outPath) {
            fs.copyFileSync(expResult.expPath, outPath);
        }
        return outPath;
    } else {
        throw new Error('Unknown expResult type or missing content');
    }
}

// Generate a second exp.py for inference, using the provided template and DB values
async function generateYoloxInferenceExp(trainingId, options = {}) {
    let training = await Training.findByPk(trainingId);
    if (!training) {
        training = await Training.findOne({ where: { project_details_id: trainingId } });
    }
    if (!training) throw new Error('Training not found for trainingId or project_details_id: ' + trainingId);
    // Always use the trainingId (project_details_id) for annotation file names
    const projectDetailsId = training.project_details_id;
    const dataDir = options.data_dir || '/home/kitraining/To_Annotate/';
    const trainAnn = options.train_ann || `coco_project_${trainingId}_train.json`;
    const valAnn = options.val_ann || `coco_project_${trainingId}_valid.json`;
    const testAnn = options.test_ann || `coco_project_${trainingId}_test.json`;
    // Get num_classes from TrainingProject.classes JSON
    let numClasses = 80;
    try {
        const trainingProject = await TrainingProject.findByPk(projectDetailsId);
        if (trainingProject && trainingProject.classes) {
            let classesArr = trainingProject.classes;
            if (typeof classesArr === 'string') {
                classesArr = JSON.parse(classesArr);
            }
            if (Array.isArray(classesArr)) {
                numClasses = classesArr.filter(c => c !== null && c !== undefined && c !== '').length;
            } else if (typeof classesArr === 'object' && classesArr !== null) {
                numClasses = Object.keys(classesArr).filter(k => classesArr[k] !== null && classesArr[k] !== undefined && classesArr[k] !== '').length;
            }
        }
    } catch (e) {
        console.warn('Could not determine num_classes from TrainingProject.classes:', e);
    }
    const depth = options.depth || training.depth || 1.00;
    const width = options.width || training.width || 1.00;
    const inputSize = options.input_size || training.input_size || [640, 640];
    const mosaicScale = options.mosaic_scale || training.mosaic_scale || [0.1, 2];
    const randomSize = options.random_size || training.random_size || [10, 20];
    const testSize = options.test_size || training.test_size || [640, 640];
    const expName = options.exp_name || 'inference_exp';
    const enableMixup = options.enable_mixup !== undefined ? options.enable_mixup : false;
    let expContent = '';
    expContent += `#!/usr/bin/env python3\n# -*- coding:utf-8 -*-\n# Copyright (c) Megvii, Inc. and its affiliates.\n\nimport os\n\nfrom yolox.exp import Exp as MyExp\n\n\nclass Exp(MyExp):\n    def __init__(self):\n        super(Exp, self).__init__()\n        self.data_dir = "${dataDir}"\n        self.train_ann = "${trainAnn}"\n        self.val_ann = "${valAnn}"\n        self.test_ann = "${testAnn}"\n        self.num_classes = ${numClasses}\n`;
    // Set pretrained_ckpt if transfer_learning is 'coco'
    if (training.transfer_learning && typeof training.transfer_learning === 'string' && training.transfer_learning.toLowerCase() === 'coco') {
        const yoloxBaseDir = '/home/kitraining/Yolox/YOLOX-main';
        const selectedModel = training.selected_model ? training.selected_model.replace(/\.pth$/i, '') : '';
        if (selectedModel) {
            expContent += `        self.pretrained_ckpt = r'${yoloxBaseDir}/pretrained/${selectedModel}.pth'\n`;
        }
    }
    expContent += `        self.depth = ${depth}\n        self.width = ${width}\n        self.input_size = (${Array.isArray(inputSize) ? inputSize.join(', ') : inputSize})\n        self.mosaic_scale = (${Array.isArray(mosaicScale) ? mosaicScale.join(', ') : mosaicScale})\n        self.random_size = (${Array.isArray(randomSize) ? randomSize.join(', ') : randomSize})\n        self.test_size = (${Array.isArray(testSize) ? testSize.join(', ') : testSize})\n        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]\n        self.enable_mixup = ${enableMixup ? 'True' : 'False'}\n`;
    return expContent;
}

// Save inference exp.py to a custom path
async function saveYoloxInferenceExp(trainingId, outPath, options = {}) {
    const expContent = await generateYoloxInferenceExp(trainingId, options);
    fs.writeFileSync(outPath, expContent);
    return outPath;
}

const fs = require('fs');
const path = require('path');
const Training = require('../models/training.js');
const TrainingProject = require('../models/TrainingProject.js');

// Remove Python comments and legacy code
const exp_names = [
    'YOLOX-s',
    'YOLOX-m',
    'YOLOX-l',
    'YOLOX-x',
    'YOLOX-Darknet53', //todo
    'YOLOX-Nano',
    'YOLOX-Tiny'
]

//TODO: Clean up generation of exp_names.py and remove second exp creation!!!


// Refactored: Accept trainingId, fetch info from DB
async function generateYoloxExp(trainingId) {
    // Fetch training row from DB by project_details_id if not found by PK
    let training = await Training.findByPk(trainingId);
    if (!training) {
        training = await Training.findOne({ where: { project_details_id: trainingId } });
    }
    if (!training) throw new Error('Training not found for trainingId or project_details_id: ' + trainingId);

    // If transfer_learning is 'coco', just return the path to the default exp.py
    if (training.transfer_learning === 'coco') {
        const selectedModel = training.selected_model.toLowerCase().replace('-', '_');
        const expSourcePath = `/home/kitraining/Yolox/YOLOX-main/exps/default/${selectedModel}.py`;
        if (!fs.existsSync(expSourcePath)) {
            throw new Error(`Default exp.py not found for model: ${selectedModel} at ${expSourcePath}`);
        }
        // Copy to project folder (e.g., /home/kitraining/coco_tool/backend/project_XX/YY/exp.py)
        const projectDetailsId = training.project_details_id;
        const projectFolder = path.resolve(__dirname, `../project_23/${projectDetailsId}`);
        if (!fs.existsSync(projectFolder)) {
            fs.mkdirSync(projectFolder, { recursive: true });
        }
        const expDestPath = path.join(projectFolder, 'exp.py');
        fs.copyFileSync(expSourcePath, expDestPath);
        return { type: 'default', expPath: expDestPath };
    }

    // If transfer_learning is 'sketch', generate a custom exp.py as before
    if (training.transfer_learning === 'sketch') {
        // ...existing custom exp.py generation logic here (copy from previous implementation)...
        // For brevity, you can call generateYoloxInferenceExp or similar here, or inline the logic.
        // Example:
        const expContent = await generateYoloxInferenceExp(trainingId);
        return { type: 'custom', expContent };
    }

    throw new Error('Unknown transfer_learning type: ' + training.transfer_learning);
}

async function saveYoloxExp(trainingId, outPath) {
    const expResult = await generateYoloxExp(trainingId);
    if (expResult.type === 'custom' && expResult.expContent) {
        fs.writeFileSync(outPath, expResult.expContent);
        return outPath;
    } else if (expResult.type === 'default' && expResult.expPath) {
        // Optionally copy the file if outPath is different
        if (expResult.expPath !== outPath) {
            fs.copyFileSync(expResult.expPath, outPath);
        }
        return outPath;
    } else {
        throw new Error('Unknown expResult type or missing content');
    }
}

// Generate a second exp.py for inference, using the provided template and DB values
async function generateYoloxInferenceExp(trainingId, options = {}) {
    let training = await Training.findByPk(trainingId);
    if (!training) {
        training = await Training.findOne({ where: { project_details_id: trainingId } });
    }
    if (!training) throw new Error('Training not found for trainingId or project_details_id: ' + trainingId);
    // Always use the trainingId (project_details_id) for annotation file names
    const projectDetailsId = training.project_details_id;
    const dataDir = options.data_dir || '/home/kitraining/To_Annotate/';
    const trainAnn = options.train_ann || `coco_project_${trainingId}_train.json`;
    const valAnn = options.val_ann || `coco_project_${trainingId}_valid.json`;
    const testAnn = options.test_ann || `coco_project_${trainingId}_test.json`;
    // Get num_classes from TrainingProject.classes JSON
    let numClasses = 80;
    try {
        const trainingProject = await TrainingProject.findByPk(projectDetailsId);
        if (trainingProject && trainingProject.classes) {
            let classesArr = trainingProject.classes;
            if (typeof classesArr === 'string') {
                classesArr = JSON.parse(classesArr);
            }
            if (Array.isArray(classesArr)) {
                numClasses = classesArr.filter(c => c !== null && c !== undefined && c !== '').length;
            } else if (typeof classesArr === 'object' && classesArr !== null) {
                numClasses = Object.keys(classesArr).filter(k => classesArr[k] !== null && classesArr[k] !== undefined && classesArr[k] !== '').length;
            }
        }
    } catch (e) {
        console.warn('Could not determine num_classes from TrainingProject.classes:', e);
    }
    const depth = options.depth || training.depth || 1.00;
    const width = options.width || training.width || 1.00;
    const inputSize = options.input_size || training.input_size || [640, 640];
    const mosaicScale = options.mosaic_scale || training.mosaic_scale || [0.1, 2];
    const randomSize = options.random_size || training.random_size || [10, 20];
    const testSize = options.test_size || training.test_size || [640, 640];
    const expName = options.exp_name || 'inference_exp';
    const enableMixup = options.enable_mixup !== undefined ? options.enable_mixup : false;
    let expContent = '';
    expContent += `#!/usr/bin/env python3\n# -*- coding:utf-8 -*-\n# Copyright (c) Megvii, Inc. and its affiliates.\n\nimport os\n\nfrom yolox.exp import Exp as MyExp\n\n\nclass Exp(MyExp):\n    def __init__(self):\n        super(Exp, self).__init__()\n        self.data_dir = "${dataDir}"\n        self.train_ann = "${trainAnn}"\n        self.val_ann = "${valAnn}"\n        self.test_ann = "${testAnn}"\n        self.num_classes = ${numClasses}\n`;
    // Set pretrained_ckpt if transfer_learning is 'coco'
    if (training.transfer_learning && typeof training.transfer_learning === 'string' && training.transfer_learning.toLowerCase() === 'coco') {
        const yoloxBaseDir = '/home/kitraining/Yolox/YOLOX-main';
        const selectedModel = training.selected_model ? training.selected_model.replace(/\.pth$/i, '') : '';
        if (selectedModel) {
            expContent += `        self.pretrained_ckpt = r'${yoloxBaseDir}/pretrained/${selectedModel}.pth'\n`;
        }
    }
    expContent += `        self.depth = ${depth}\n        self.width = ${width}\n        self.input_size = (${Array.isArray(inputSize) ? inputSize.join(', ') : inputSize})\n        self.mosaic_scale = (${Array.isArray(mosaicScale) ? mosaicScale.join(', ') : mosaicScale})\n        self.random_size = (${Array.isArray(randomSize) ? randomSize.join(', ') : randomSize})\n        self.test_size = (${Array.isArray(testSize) ? testSize.join(', ') : testSize})\n        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]\n        self.enable_mixup = ${enableMixup ? 'True' : 'False'}\n`;
    return expContent;
}

// Save inference exp.py to a custom path
async function saveYoloxInferenceExp(trainingId, outPath, options = {}) {
    const expContent = await generateYoloxInferenceExp(trainingId, options);
    fs.writeFileSync(outPath, expContent);
    return outPath;
}

module.exports = { generateYoloxExp, saveYoloxExp, generateYoloxInferenceExp, saveYoloxInferenceExp };
@@ -1,179 +1,288 @@
import json
import os
import math
from models.TrainingProject import TrainingProject
from models.TrainingProjectDetails import TrainingProjectDetails
from models.Images import Image
from models.Annotation import Annotation

def generate_training_json(training_id):
    """Generate COCO JSON for training, validation, and test sets"""
    # training_id is now project_details_id
    training_project_details = TrainingProjectDetails.query.get(training_id)

    if not training_project_details:
        raise Exception(f'No TrainingProjectDetails found for project_details_id {training_id}')

    details_obj = training_project_details.to_dict()

    # Get parent project for name
    training_project = TrainingProject.query.get(details_obj['project_id'])

    # Get split percentages (default values if not set)
    train_percent = details_obj.get('train_percent', 85)
    valid_percent = details_obj.get('valid_percent', 10)
    test_percent = details_obj.get('test_percent', 5)

    coco_images = []
    coco_annotations = []
    coco_categories = []
    category_map = {}
    category_id = 0
    image_id = 0
    annotation_id = 0

    for cls in details_obj['class_map']:
        asg_map = []
        list_asg = cls[1]

        for asg in list_asg:
            asg_map.append({'original': asg[0], 'mapped': asg[1]})
            # Build category list and mapping
            if asg[1] and asg[1] not in category_map:
                category_map[asg[1]] = category_id
                coco_categories.append({'id': category_id, 'name': asg[1], 'supercategory': ''})
                category_id += 1

        # Get images for this project
        images = Image.query.filter_by(project_id=cls[0]).all()

        for image in images:
            image_id += 1
            file_name = image.image_path

            # Clean up file path
            if '%20' in file_name:
                file_name = file_name.replace('%20', ' ')
            if file_name and file_name.startswith('/data/local-files/?d='):
                file_name = file_name.replace('/data/local-files/?d=', '')
                file_name = file_name.replace('/home/kitraining/home/kitraining/', '')
            if file_name and file_name.startswith('home/kitraining/To_Annotate/'):
                file_name = file_name.replace('home/kitraining/To_Annotate/', '')

            # Get annotations for this image
            annotations = Annotation.query.filter_by(image_id=image.image_id).all()

            coco_images.append({
                'id': image_id,
                'file_name': file_name,
                'width': image.width or 0,
                'height': image.height or 0
            })

            for annotation in annotations:
                # Translate class name using asg_map
                mapped_class = annotation.Label
                for map_entry in asg_map:
                    if annotation.Label == map_entry['original']:
                        mapped_class = map_entry['mapped']
                        break

                # Only add annotation if mapped_class is valid
                if mapped_class and mapped_class in category_map:
                    annotation_id += 1
                    area = 0
                    if annotation.width and annotation.height:
                        area = annotation.width * annotation.height

                    coco_annotations.append({
                        'id': annotation_id,
                        'image_id': image_id,
                        'category_id': category_map[mapped_class],
                        'bbox': [annotation.x, annotation.y, annotation.width, annotation.height],
                        'area': area,
                        'iscrowd': 0
                    })

    # Shuffle images for random split using seed
    def seeded_random(seed):
        x = math.sin(seed) * 10000
        return x - math.floor(x)

    def shuffle(array, seed):
        for i in range(len(array) - 1, 0, -1):
            j = int(seeded_random(seed + i) * (i + 1))
            array[i], array[j] = array[j], array[i]
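
    # A note on the generator above (a sketch of its behaviour, mirroring the
    # JS implementation rather than Python's random module): seeded_random(seed)
    # returns the fractional part of math.sin(seed) * 10000, a deterministic
    # value in [0, 1), and shuffle() uses it to drive a Fisher-Yates pass, so a
    # given seed always reproduces the same ordering.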

    # Use seed from details_obj if present, else default to 42
    split_seed = details_obj.get('seed', 42)
    if split_seed is not None:
        split_seed = int(split_seed)
    else:
        split_seed = 42

    shuffle(coco_images, split_seed)

    # Split images
    total_images = len(coco_images)
    train_count = int(total_images * train_percent / 100)
    valid_count = int(total_images * valid_percent / 100)
    test_count = total_images - train_count - valid_count

    train_images = coco_images[0:train_count]
    valid_images = coco_images[train_count:train_count + valid_count]
    test_images = coco_images[train_count + valid_count:]

    # Helper to get image ids for each split
    train_image_ids = {img['id'] for img in train_images}
    valid_image_ids = {img['id'] for img in valid_images}
    test_image_ids = {img['id'] for img in test_images}

    # Split annotations
    train_annotations = [ann for ann in coco_annotations if ann['image_id'] in train_image_ids]
    valid_annotations = [ann for ann in coco_annotations if ann['image_id'] in valid_image_ids]
    test_annotations = [ann for ann in coco_annotations if ann['image_id'] in test_image_ids]

    # Build final COCO JSONs
    def build_coco_json(images, annotations, categories):
        return {
            'images': images,
            'annotations': annotations,
            'categories': categories
        }

    train_json = build_coco_json(train_images, train_annotations, coco_categories)
    valid_json = build_coco_json(valid_images, valid_annotations, coco_categories)
    test_json = build_coco_json(test_images, test_annotations, coco_categories)

    # Create output directory
    project_name = training_project.title.replace(' ', '_') if training_project and training_project.title else f'project_{details_obj["project_id"]}'
    annotations_dir = '/home/kitraining/To_Annotate/annotations'
    os.makedirs(annotations_dir, exist_ok=True)

    # Write to files
    train_path = f'{annotations_dir}/coco_project_{training_id}_train.json'
    valid_path = f'{annotations_dir}/coco_project_{training_id}_valid.json'
    test_path = f'{annotations_dir}/coco_project_{training_id}_test.json'

    with open(train_path, 'w') as f:
        json.dump(train_json, f, indent=2)
    with open(valid_path, 'w') as f:
        json.dump(valid_json, f, indent=2)
    with open(test_path, 'w') as f:
        json.dump(test_json, f, indent=2)

    print(f'COCO JSON splits written to {annotations_dir} for trainingId {training_id}')

    # Also generate inference exp.py
    from services.generate_yolox_exp import generate_yolox_inference_exp
    project_folder = os.path.join(os.path.dirname(__file__), '..', project_name, str(training_id))
    os.makedirs(project_folder, exist_ok=True)

    inference_exp_path = os.path.join(project_folder, 'exp_infer.py')
    try:
        exp_content = generate_yolox_inference_exp(training_id)
        with open(inference_exp_path, 'w') as f:
            f.write(exp_content)
        print(f'Inference exp.py written to {inference_exp_path}')
    except Exception as err:
        print(f'Failed to generate inference exp.py: {err}')

import json
import os
import math
from models.TrainingProject import TrainingProject
from models.TrainingProjectDetails import TrainingProjectDetails
from models.Images import Image
from models.Annotation import Annotation

def generate_training_json(training_id):
    """Generate COCO JSON for training, validation, and test sets"""
    # training_id is now project_details_id
    training_project_details = TrainingProjectDetails.query.get(training_id)

    if not training_project_details:
        raise Exception(f'No TrainingProjectDetails found for project_details_id {training_id}')

    details_obj = training_project_details.to_dict()

    # Get parent project for name
    training_project = TrainingProject.query.get(details_obj['project_id'])

    # Get the data directory setting for image paths
    from services.settings_service import get_setting
    data_dir = get_setting('yolox_data_dir', '/home/kitraining/To_Annotate/')

    # Fix UNC path if it's missing the \\ prefix
    # Check if it looks like a UNC path without proper prefix (e.g., "192.168.1.19\...")
    if data_dir and not data_dir.startswith('\\\\') and not data_dir.startswith('/'):
        # Check if it starts with an IP address pattern
        import re
        if re.match(r'^\d+\.\d+\.\d+\.\d+[/\\]', data_dir):
            data_dir = '\\\\' + data_dir

    # Ensure data_dir ends with separator
    if not data_dir.endswith(os.sep) and not data_dir.endswith('/'):
        data_dir += os.sep
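
    # Illustrative effect of the repair above (hypothetical setting value): a
    # data_dir stored as '192.168.1.19/share/images' matches the IP pattern and
    # gains a leading double backslash, yielding the UNC form
    # \\192.168.1.19/share/images, plus the trailing separator appended above.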

    # Get split percentages (default values if not set)
    train_percent = details_obj.get('train_percent', 85)
    valid_percent = details_obj.get('valid_percent', 10)
    test_percent = details_obj.get('test_percent', 5)

    coco_images = []
    coco_annotations = []
    coco_categories = []
    category_map = {}
    category_id = 0
    image_id = 0
    annotation_id = 0

    # Build category list and mapping from class_map dictionary {source: target}
    class_map = details_obj.get('class_map', {})

    for source_class, target_class in class_map.items():
        if target_class and target_class not in category_map:
            category_map[target_class] = category_id
            coco_categories.append({'id': category_id, 'name': target_class, 'supercategory': ''})
            category_id += 1

    # Get all annotation projects (Label Studio project IDs)
    annotation_projects = details_obj.get('annotation_projects', [])

    # Get class mappings from database grouped by Label Studio project
    from models.ClassMapping import ClassMapping
    all_mappings = ClassMapping.query.filter_by(project_details_id=training_id).all()

    # Group mappings by Label Studio project ID
    mappings_by_project = {}
    for mapping in all_mappings:
        ls_proj_id = mapping.label_studio_project_id
        if ls_proj_id not in mappings_by_project:
            mappings_by_project[ls_proj_id] = {}
        mappings_by_project[ls_proj_id][mapping.source_class] = mapping.target_class

        # Also add target class to category map if not present
        if mapping.target_class and mapping.target_class not in category_map:
            category_map[mapping.target_class] = category_id
            coco_categories.append({'id': category_id, 'name': mapping.target_class, 'supercategory': ''})
            category_id += 1
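
    # Shape of the lookup built above (hypothetical values): for Label Studio
    # projects 7 and 9 mapping their own labels onto shared target classes,
    #   mappings_by_project == {7: {'car': 'vehicle', 'lorry': 'vehicle'},
    #                           9: {'auto': 'vehicle'}}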

    # Iterate through each annotation project to collect images and annotations
    for ls_project_id in annotation_projects:
        # Get images for this Label Studio project
        images = Image.query.filter_by(project_id=ls_project_id).all()

        for image in images:
            image_id += 1
            file_name = image.image_path

            # Clean up file path from Label Studio format
            if '%20' in file_name:
                file_name = file_name.replace('%20', ' ')
            if file_name and file_name.startswith('/data/local-files/?d='):
                file_name = file_name.replace('/data/local-files/?d=', '')

            # Remove any Label Studio prefixes but keep full path
            # Common Label Studio patterns
            prefixes_to_remove = [
                '//192.168.1.19/home/kitraining/To_Annotate/',
                '192.168.1.19/home/kitraining/To_Annotate/',
                '/home/kitraining/home/kitraining/',
                'home/kitraining/To_Annotate/',
                '/home/kitraining/To_Annotate/',
            ]

            # Try each prefix
            for prefix in prefixes_to_remove:
                if file_name.startswith(prefix):
                    file_name = file_name[len(prefix):]
                    break

            # Construct ABSOLUTE path using data_dir
            # Detect platform for proper path handling
            import platform
            is_windows = platform.system() == 'Windows'

            # Normalize path separators in file_name to forward slashes first (OS-agnostic)
            file_name = file_name.replace('\\', '/')

            # Normalize data_dir to use forward slashes
            normalized_data_dir = data_dir.rstrip('/\\').replace('\\', '/')

            # Check if file_name is already an absolute path
            is_absolute = False
            if is_windows:
                # Windows: Check for drive letter (C:/) or UNC path (//server/)
                is_absolute = (len(file_name) > 1 and file_name[1] == ':') or file_name.startswith('//')
            else:
                # Linux/Mac: Check for leading /
                is_absolute = file_name.startswith('/')

            if not is_absolute:
                # It's a relative path, combine with data_dir
                if normalized_data_dir.startswith('//'):
                    # UNC path on Windows
                    file_name = normalized_data_dir + '/' + file_name
                else:
                    # Regular path - use os.path.join but with forward slashes
                    file_name = os.path.join(normalized_data_dir, file_name).replace('\\', '/')

            # Final OS-specific normalization
            if is_windows:
                # Convert to Windows-style backslashes
                file_name = file_name.replace('/', '\\')
            else:
                # Keep as forward slashes for Linux/Mac
                file_name = file_name.replace('\\', '/')
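
            # Worked example (hypothetical values): with data_dir
            # '/home/kitraining/To_Annotate/' and a Label Studio path
            # '/data/local-files/?d=home/kitraining/To_Annotate/cats/img%201.jpg',
            # the steps above yield the relative name 'cats/img 1.jpg' and then
            # the absolute '/home/kitraining/To_Annotate/cats/img 1.jpg' on Linux.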

            # Get annotations for this image
            annotations = Annotation.query.filter_by(image_id=image.image_id).all()

            # Ensure width and height are integers and valid
            # If missing or invalid, skip this image or use default dimensions
            img_width = int(image.width) if image.width else 0
            img_height = int(image.height) if image.height else 0

            # Skip images with invalid dimensions
            if img_width <= 0 or img_height <= 0:
                print(f'Warning: Skipping image {file_name} with invalid dimensions: {img_width}x{img_height}')
                continue

            coco_images.append({
                'id': image_id,
                'file_name': file_name,  # Use absolute path
                'width': img_width,
                'height': img_height
            })

            for annotation in annotations:
                # Translate class name using class_map for this specific Label Studio project
                original_class = annotation.Label
                project_class_map = mappings_by_project.get(ls_project_id, {})
                mapped_class = project_class_map.get(original_class, original_class)

                # Only add annotation if mapped_class is valid
                if mapped_class and mapped_class in category_map:
                    annotation_id += 1
                    area = 0
                    if annotation.width and annotation.height:
                        area = annotation.width * annotation.height

                    coco_annotations.append({
                        'id': annotation_id,
                        'image_id': image_id,
                        'category_id': category_map[mapped_class],
                        'bbox': [annotation.x, annotation.y, annotation.width, annotation.height],
                        'area': area,
                        'iscrowd': 0
                    })

    # Shuffle images for random split using seed
    def seeded_random(seed):
        x = math.sin(seed) * 10000
        return x - math.floor(x)

    def shuffle(array, seed):
        for i in range(len(array) - 1, 0, -1):
            j = int(seeded_random(seed + i) * (i + 1))
            array[i], array[j] = array[j], array[i]

    # Use seed from details_obj if present, else default to 42
    split_seed = details_obj.get('seed', 42)
    if split_seed is not None:
        split_seed = int(split_seed)
    else:
        split_seed = 42

    shuffle(coco_images, split_seed)
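
    # Reproducibility check (illustrative): shuffling any list twice with the
    # same seed yields the same order, e.g.
    #   demo = [{'id': i} for i in range(5)]
    #   shuffle(demo, split_seed)  # identical result on every run for this seed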

    # Split images
    total_images = len(coco_images)
    train_count = int(total_images * train_percent / 100)
    valid_count = int(total_images * valid_percent / 100)
    test_count = total_images - train_count - valid_count

    train_images = coco_images[0:train_count]
    valid_images = coco_images[train_count:train_count + valid_count]
    test_images = coco_images[train_count + valid_count:]
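
    # Worked example of the split arithmetic (hypothetical count): with 103
    # images and 85/10/5 percentages, train_count = int(87.55) = 87,
    # valid_count = int(10.3) = 10, and the test slice takes the remaining
    # 103 - 87 - 10 = 6 images, so rounding losses always land in the test set.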

    # Helper to get image ids for each split
    train_image_ids = {img['id'] for img in train_images}
    valid_image_ids = {img['id'] for img in valid_images}
    test_image_ids = {img['id'] for img in test_images}

    # Split annotations
    train_annotations = [ann for ann in coco_annotations if ann['image_id'] in train_image_ids]
    valid_annotations = [ann for ann in coco_annotations if ann['image_id'] in valid_image_ids]
    test_annotations = [ann for ann in coco_annotations if ann['image_id'] in test_image_ids]

    # Build final COCO JSONs
    def build_coco_json(images, annotations, categories):
        return {
            'images': images,
            'annotations': annotations,
            'categories': categories
        }
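
    # Resulting document shape (minimal sketch of one split):
    #   {"images":      [{"id": 1, "file_name": "...", "width": 640, "height": 480}],
    #    "annotations": [{"id": 1, "image_id": 1, "category_id": 0,
    #                     "bbox": [x, y, w, h], "area": w * h, "iscrowd": 0}],
    #    "categories":  [{"id": 0, "name": "vehicle", "supercategory": ""}]}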

    train_json = build_coco_json(train_images, train_annotations, coco_categories)
    valid_json = build_coco_json(valid_images, valid_annotations, coco_categories)
    test_json = build_coco_json(test_images, test_annotations, coco_categories)

    # Create output directory
    from services.settings_service import get_setting
    from models.training import Training

    output_base_path = get_setting('yolox_output_path', './backend')

    project_name = training_project.title.replace(' ', '_') if training_project and training_project.title else f'project_{details_obj["project_id"]}'

    # Get training record to use its name for folder
    training_record = Training.query.filter_by(project_details_id=training_id).first()
    training_folder_name = f"{training_record.exp_name or training_record.training_name or 'training'}_{training_record.id}" if training_record else str(training_id)
    training_folder_name = training_folder_name.replace(' ', '_')

    # Use training_record.id for file names to match what generate_yolox_exp expects
    training_file_id = training_record.id if training_record else training_id

    # Save annotations to the configured output folder
    annotations_dir = os.path.join(output_base_path, project_name, training_folder_name, 'annotations')
    os.makedirs(annotations_dir, exist_ok=True)

    # Write to files
    train_path = os.path.join(annotations_dir, f'coco_project_{training_file_id}_train.json')
    valid_path = os.path.join(annotations_dir, f'coco_project_{training_file_id}_valid.json')
    test_path = os.path.join(annotations_dir, f'coco_project_{training_file_id}_test.json')

    with open(train_path, 'w') as f:
        json.dump(train_json, f, indent=2)
    with open(valid_path, 'w') as f:
        json.dump(valid_json, f, indent=2)
    with open(test_path, 'w') as f:
        json.dump(test_json, f, indent=2)

    print(f'COCO JSON splits written to {annotations_dir} for trainingId {training_id}')

    # Also generate inference exp.py
    from services.generate_yolox_exp import generate_yolox_inference_exp
    project_folder = os.path.join(output_base_path, project_name, str(training_id))
    os.makedirs(project_folder, exist_ok=True)

    inference_exp_path = os.path.join(project_folder, 'exp_infer.py')
    try:
        exp_content = generate_yolox_inference_exp(training_id)
        with open(inference_exp_path, 'w') as f:
            f.write(exp_content)
        print(f'Inference exp.py written to {inference_exp_path}')
    except Exception as err:
        print(f'Failed to generate inference exp.py: {err}')
@@ -1,228 +1,329 @@
import os
import shutil
import importlib.util
from models.training import Training
from models.TrainingProject import TrainingProject

def load_base_config(selected_model):
    """Load base configuration for a specific YOLOX model"""
    model_name = selected_model.lower().replace('-', '_').replace('.pth', '')
    base_config_path = os.path.join(os.path.dirname(__file__), '..', 'data', f'{model_name}.py')

    if not os.path.exists(base_config_path):
        raise Exception(f'Base configuration not found for model: {model_name} at {base_config_path}')

    # Load the module dynamically
    spec = importlib.util.spec_from_file_location(f"base_config_{model_name}", base_config_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    # Extract all attributes from BaseExp class
    base_exp = module.BaseExp()
    base_config = {}
    for attr in dir(base_exp):
        if not attr.startswith('_'):
            base_config[attr] = getattr(base_exp, attr)

    return base_config

def generate_yolox_exp(training_id):
    """Generate YOLOX exp.py file"""
    # Fetch training row from DB
    training = Training.query.get(training_id)
    if not training:
        training = Training.query.filter_by(project_details_id=training_id).first()

    if not training:
        raise Exception(f'Training not found for trainingId or project_details_id: {training_id}')

    # If transfer_learning is 'coco', generate exp using base config + custom settings
    if training.transfer_learning == 'coco':
        exp_content = generate_yolox_inference_exp(training_id, use_base_config=True)
        return {'type': 'custom', 'expContent': exp_content}

    # If transfer_learning is 'sketch', generate custom exp.py
    if training.transfer_learning == 'sketch':
        exp_content = generate_yolox_inference_exp(training_id, use_base_config=False)
        return {'type': 'custom', 'expContent': exp_content}

    raise Exception(f'Unknown transfer_learning type: {training.transfer_learning}')

def save_yolox_exp(training_id, out_path):
    """Save YOLOX exp.py to specified path"""
    exp_result = generate_yolox_exp(training_id)

    if exp_result['type'] == 'custom' and 'expContent' in exp_result:
        with open(out_path, 'w') as f:
            f.write(exp_result['expContent'])
        return out_path
    elif exp_result['type'] == 'default' and 'expPath' in exp_result:
        # Optionally copy the file if outPath is different
        if exp_result['expPath'] != out_path:
            shutil.copyfile(exp_result['expPath'], out_path)
        return out_path
    else:
        raise Exception('Unknown expResult type or missing content')

def generate_yolox_inference_exp(training_id, options=None, use_base_config=False):
    """Generate inference exp.py using DB values

    Args:
        training_id: The training/project_details ID
        options: Optional overrides for data paths
        use_base_config: If True, load base config and only override with user-defined values
    """
    if options is None:
        options = {}

    training = Training.query.get(training_id)
    if not training:
        training = Training.query.filter_by(project_details_id=training_id).first()

    if not training:
        raise Exception(f'Training not found for trainingId or project_details_id: {training_id}')

    # Always use the training_id (project_details_id) for annotation file names
    project_details_id = training.project_details_id

    data_dir = options.get('data_dir', '/home/kitraining/To_Annotate/')
    train_ann = options.get('train_ann', f'coco_project_{training_id}_train.json')
    val_ann = options.get('val_ann', f'coco_project_{training_id}_valid.json')
    test_ann = options.get('test_ann', f'coco_project_{training_id}_test.json')

    # Get num_classes from TrainingProject.classes JSON
    num_classes = 80
    try:
        training_project = TrainingProject.query.get(project_details_id)
        if training_project and training_project.classes:
            classes_arr = training_project.classes
            if isinstance(classes_arr, str):
                import json
                classes_arr = json.loads(classes_arr)

            if isinstance(classes_arr, list):
                num_classes = len([c for c in classes_arr if c not in [None, '']])
            elif isinstance(classes_arr, dict):
                num_classes = len([k for k, v in classes_arr.items() if v not in [None, '']])
    except Exception as e:
        print(f'Could not determine num_classes from TrainingProject.classes: {e}')

    # Initialize config dictionary
    config = {}

    # If using base config (transfer learning from COCO), load protected parameters first
    if use_base_config and training.selected_model:
        try:
            base_config = load_base_config(training.selected_model)
            config.update(base_config)
            print(f'Loaded base config for {training.selected_model}: {list(base_config.keys())}')
        except Exception as e:
            print(f'Warning: Could not load base config for {training.selected_model}: {e}')
            print('Falling back to custom settings only')

    # Override with user-defined values from training table (only if they exist and are not None)
    user_overrides = {
        'depth': training.depth,
        'width': training.width,
        'input_size': training.input_size,
        'mosaic_scale': training.mosaic_scale,
        'test_size': training.test_size,
        'enable_mixup': training.enable_mixup,
        'max_epoch': training.max_epoch,
        'warmup_epochs': training.warmup_epochs,
        'warmup_lr': training.warmup_lr,
        'basic_lr_per_img': training.basic_lr_per_img,
        'scheduler': training.scheduler,
        'no_aug_epochs': training.no_aug_epochs,
        'min_lr_ratio': training.min_lr_ratio,
        'ema': training.ema,
        'weight_decay': training.weight_decay,
        'momentum': training.momentum,
        'print_interval': training.print_interval,
        'eval_interval': training.eval_interval,
        'test_conf': training.test_conf,
        'nms_thre': training.nms_thre,
        'mosaic_prob': training.mosaic_prob,
        'mixup_prob': training.mixup_prob,
        'hsv_prob': training.hsv_prob,
        'flip_prob': training.flip_prob,
        'degrees': training.degrees,
        'translate': training.translate,
        'shear': training.shear,
        'mixup_scale': training.mixup_scale,
        'activation': training.activation,
    }

    # Only override if value is explicitly set (not None)
    for key, value in user_overrides.items():
        if value is not None:
            config[key] = value

    # Apply any additional options overrides
    config.update(options)

    # Set defaults for any missing required parameters
    config.setdefault('depth', 1.00)
    config.setdefault('width', 1.00)
    config.setdefault('input_size', [640, 640])
    config.setdefault('mosaic_scale', [0.1, 2])
    config.setdefault('random_size', [10, 20])
    config.setdefault('test_size', [640, 640])
    config.setdefault('enable_mixup', False)
    config.setdefault('exp_name', 'inference_exp')

    # Build exp content
    exp_content = f'''#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "{data_dir}"
        self.train_ann = "{train_ann}"
        self.val_ann = "{val_ann}"
        self.test_ann = "{test_ann}"
        self.num_classes = {num_classes}
'''

    # Set pretrained_ckpt if transfer_learning is 'coco'
    if training.transfer_learning and isinstance(training.transfer_learning, str) and training.transfer_learning.lower() == 'coco':
        yolox_base_dir = '/home/kitraining/Yolox/YOLOX-main'
        selected_model = training.selected_model.replace('.pth', '') if training.selected_model else ''
        if selected_model:
            exp_content += f"        self.pretrained_ckpt = r'{yolox_base_dir}/pretrained/{selected_model}.pth'\n"

    # Format arrays
    def format_value(val):
        if isinstance(val, (list, tuple)):
            return '(' + ', '.join(map(str, val)) + ')'
        elif isinstance(val, bool):
            return str(val)
        elif isinstance(val, str):
            return f'"{val}"'
        else:
            return str(val)
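
    # Example renderings from format_value (illustrative inputs):
    #   [640, 640] -> '(640, 640)'    True -> 'True'
    #   'silu'     -> '"silu"'        0.33 -> '0.33'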

    # Add all config parameters to exp
    for key, value in config.items():
        if key not in ['exp_name']:  # exp_name is handled separately
            exp_content += f"        self.{key} = {format_value(value)}\n"

    # Add exp_name at the end (uses dynamic path)
    exp_content += '''        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
'''

    return exp_content

def save_yolox_inference_exp(training_id, out_path, options=None):
    """Save inference exp.py to custom path"""
    exp_content = generate_yolox_inference_exp(training_id, options, use_base_config=False)
    with open(out_path, 'w') as f:
        f.write(exp_content)
    return out_path

import os
import shutil
import importlib.util
from models.training import Training
from models.TrainingProject import TrainingProject

def load_base_config(selected_model):
    """Load base configuration for a specific YOLOX model"""
    model_name = selected_model.lower().replace('-', '_').replace('.pth', '')
    base_config_path = os.path.join(os.path.dirname(__file__), '..', 'data', f'{model_name}.py')

    if not os.path.exists(base_config_path):
        raise Exception(f'Base configuration not found for model: {model_name} at {base_config_path}')

    # Load the module dynamically
    spec = importlib.util.spec_from_file_location(f"base_config_{model_name}", base_config_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    # Extract all attributes from BaseExp class
    base_exp = module.BaseExp()
    base_config = {}
    for attr in dir(base_exp):
        if not attr.startswith('_'):
            base_config[attr] = getattr(base_exp, attr)

    return base_config
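
# A minimal sketch of the base-config file this loader expects (assumed layout;
# the actual files under backend/data/ may define more attributes):
#
#     # backend/data/yolox_s.py
#     class BaseExp:
#         def __init__(self):
#             self.depth = 0.33
#             self.width = 0.5
#             self.activation = "silu"
#
# With that file in place, load_base_config('YOLOX-s.pth') would return
# {'activation': 'silu', 'depth': 0.33, 'width': 0.5}.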

def generate_yolox_exp(training_id):
    """Generate YOLOX exp.py file"""
    # Fetch training row from DB
    training = Training.query.get(training_id)
    if not training:
        training = Training.query.filter_by(project_details_id=training_id).first()

    if not training:
        raise Exception(f'Training not found for trainingId or project_details_id: {training_id}')

    # If transfer_learning is 'coco', generate exp using base config + custom settings
    if training.transfer_learning == 'coco':
        exp_content = generate_yolox_inference_exp(training_id, use_base_config=True)
        return {'type': 'custom', 'expContent': exp_content}

    # If transfer_learning is 'sketch', generate custom exp.py
    if training.transfer_learning == 'sketch':
        exp_content = generate_yolox_inference_exp(training_id, use_base_config=False)
        return {'type': 'custom', 'expContent': exp_content}

    raise Exception(f'Unknown transfer_learning type: {training.transfer_learning}')

def save_yolox_exp(training_id, out_path):
    """Save YOLOX exp.py to specified path"""
    exp_result = generate_yolox_exp(training_id)

    if exp_result['type'] == 'custom' and 'expContent' in exp_result:
        with open(out_path, 'w') as f:
            f.write(exp_result['expContent'])
        return out_path
    elif exp_result['type'] == 'default' and 'expPath' in exp_result:
        # Optionally copy the file if outPath is different
        if exp_result['expPath'] != out_path:
            shutil.copyfile(exp_result['expPath'], out_path)
        return out_path
    else:
        raise Exception('Unknown expResult type or missing content')

def generate_yolox_inference_exp(training_id, options=None, use_base_config=False):
    """Generate inference exp.py using DB values

    Args:
        training_id: The training/project_details ID
        options: Optional overrides for data paths
        use_base_config: If True, load base config and only override with user-defined values
    """
    if options is None:
        options = {}

    training = Training.query.get(training_id)
    if not training:
        training = Training.query.filter_by(project_details_id=training_id).first()

    if not training:
        raise Exception(f'Training not found for trainingId or project_details_id: {training_id}')

    # Always use the project_details_id for annotation file names and paths
    project_details_id = training.project_details_id

    # Get annotation file names from options or use defaults
    # Use training.id (not project_details_id) for consistency with generate_training_json
    train_ann = options.get('train_ann', f'coco_project_{training_id}_train.json')
    val_ann = options.get('val_ann', f'coco_project_{training_id}_valid.json')
    test_ann = options.get('test_ann', f'coco_project_{training_id}_test.json')

    # Get data_dir - this should point to where IMAGES are located (not annotations)
    # YOLOX will combine data_dir + file_name from COCO JSON to find images
    # The annotations are in a separate location (output folder)
    from services.settings_service import get_setting
    from models.TrainingProjectDetails import TrainingProjectDetails

    if 'data_dir' in options:
        data_dir = options['data_dir']
    else:
        # Use the yolox_data_dir setting - this is where training images are stored
        data_dir = get_setting('yolox_data_dir', '/home/kitraining/To_Annotate/')
        # Ensure it ends with a separator
        if not data_dir.endswith(os.sep) and not data_dir.endswith('/'):
            data_dir += os.sep

    # Get num_classes from ProjectClass table (3NF)
    num_classes = 80
    try:
        from models.ProjectClass import ProjectClass
        training_project = TrainingProject.query.get(project_details_id)
        if training_project:
            # Count classes from ProjectClass table
            class_count = ProjectClass.query.filter_by(project_id=training_project.project_id).count()
            if class_count > 0:
                num_classes = class_count
    except Exception as e:
        print(f'Could not determine num_classes from ProjectClass: {e}')

    # Initialize config dictionary
    config = {}

    # If using base config (transfer learning from COCO), load protected parameters first
    if use_base_config and training.selected_model:
        try:
            base_config = load_base_config(training.selected_model)
            config.update(base_config)
            print(f'Loaded base config for {training.selected_model}: {list(base_config.keys())}')
        except Exception as e:
            print(f'Warning: Could not load base config for {training.selected_model}: {e}')
            print('Falling back to custom settings only')

    # Get size arrays from TrainingSize table (3NF)
    from models.TrainingSize import TrainingSize

    def get_size_array(training_id, size_type):
        """Helper to get size array from TrainingSize table"""
        sizes = TrainingSize.query.filter_by(
            training_id=training_id,
            size_type=size_type
        ).order_by(TrainingSize.value_order).all()
        return [s.value for s in sizes] if sizes else None
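
    # Illustrative rows (hypothetical): TrainingSize entries
    # (size_type='input_size', value_order=0, value=640) and
    # (size_type='input_size', value_order=1, value=640) yield [640, 640];
    # with no rows the helper returns None and the base-config default survives.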

    input_size = get_size_array(training.id, 'input_size')
    test_size = get_size_array(training.id, 'test_size')
    mosaic_scale = get_size_array(training.id, 'mosaic_scale')
    mixup_scale = get_size_array(training.id, 'mixup_scale')

    # Override with user-defined values from training table (only if they exist and are not None)
    user_overrides = {
        'depth': training.depth,
        'width': training.width,
        'input_size': input_size,
        'mosaic_scale': mosaic_scale,
        'test_size': test_size,
        'enable_mixup': training.enable_mixup,
        'max_epoch': training.max_epoch,
        'warmup_epochs': training.warmup_epochs,
        'warmup_lr': training.warmup_lr,
        'basic_lr_per_img': training.basic_lr_per_img,
        'scheduler': training.scheduler,
        'no_aug_epochs': training.no_aug_epochs,
        'min_lr_ratio': training.min_lr_ratio,
        'ema': training.ema,
        'weight_decay': training.weight_decay,
        'momentum': training.momentum,
        'print_interval': training.print_interval,
        'eval_interval': training.eval_interval,
        'test_conf': training.test_conf,
        'nms_thre': training.nms_thre,
        'mosaic_prob': training.mosaic_prob,
        'mixup_prob': training.mixup_prob,
        'hsv_prob': training.hsv_prob,
        'flip_prob': training.flip_prob,
        # Convert single values to tuples for YOLOX augmentation parameters
        'degrees': (training.degrees, training.degrees) if training.degrees is not None and not isinstance(training.degrees, (list, tuple)) else training.degrees,
        'translate': (training.translate, training.translate) if training.translate is not None and not isinstance(training.translate, (list, tuple)) else training.translate,
        'shear': (training.shear, training.shear) if training.shear is not None and not isinstance(training.shear, (list, tuple)) else training.shear,
        'mixup_scale': mixup_scale,
        'activation': training.activation,
    }
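
    # Note on the tuple conversion above: YOLOX's get_aug_params helper accepts
    # either a scalar v (sampled from uniform(-v, v)) or a (low, high) pair
    # (sampled from uniform(low, high)), so e.g. a stored degrees of 10.0
    # becomes the fixed range (10.0, 10.0) here rather than a +/-10.0 spread.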
|
||||
|
||||
# Only override if value is explicitly set (not None)
|
||||
for key, value in user_overrides.items():
|
||||
if value is not None:
|
||||
config[key] = value
|
||||
|
||||
# Apply any additional options overrides
|
||||
config.update(options)
|
||||
|
||||
# Set defaults for any missing required parameters
|
||||
config.setdefault('depth', 1.00)
|
||||
config.setdefault('width', 1.00)
|
||||
config.setdefault('input_size', [640, 640])
|
||||
config.setdefault('mosaic_scale', [0.1, 2])
|
||||
config.setdefault('random_size', [10, 20])
|
||||
config.setdefault('test_size', [640, 640])
|
||||
config.setdefault('enable_mixup', False)
|
||||
config.setdefault('exp_name', 'inference_exp')
|
||||
|
||||
# Prepare data_dir for template - escape backslashes and remove trailing separator
|
||||
data_dir_clean = data_dir.rstrip('/\\')
|
||||
data_dir_escaped = data_dir_clean.replace('\\', '\\\\')
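    # Hypothetical example: a Windows path like C:\data\imgs\ is first trimmed to
    # C:\data\imgs and then escaped to C:\\data\\imgs so it survives the generated source.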

    # Calculate annotations directory (where the JSON files are stored)
    # This is in the output folder, not with the images
    from models.TrainingProjectDetails import TrainingProjectDetails
    details = TrainingProjectDetails.query.get(project_details_id)
    if details:
        training_project = TrainingProject.query.get(details.project_id)
        project_name = training_project.title.replace(' ', '_') if training_project and training_project.title else f'project_{details.project_id}'
    else:
        project_name = f'project_{project_details_id}'

    training_folder_name = f"{training.exp_name or training.training_name or 'training'}_{training_id}"
    training_folder_name = training_folder_name.replace(' ', '_')

    output_base_path = get_setting('yolox_output_path', './backend')
    annotations_parent_dir = os.path.join(output_base_path, project_name, training_folder_name)
    annotations_parent_escaped = annotations_parent_dir.replace('\\', '\\\\')

    # Build exp content
    exp_content = f'''#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "{data_dir_escaped}"  # Where images are located
        self.annotations_dir = "{annotations_parent_escaped}"  # Where annotation JSONs are located
        self.train_ann = "{train_ann}"
        self.val_ann = "{val_ann}"
        self.test_ann = "{test_ann}"
        self.num_classes = {num_classes}
        # Disable train2017 subdirectory - our images are directly in data_dir
        self.name = ""
        # Set data workers for training
        self.data_num_workers = 8
'''

    # Set pretrained_ckpt if transfer_learning is 'coco'
    if training.transfer_learning and isinstance(training.transfer_learning, str) and training.transfer_learning.lower() == 'coco':
        yolox_base_dir = '/home/kitraining/Yolox/YOLOX-main'
        selected_model = training.selected_model.replace('.pth', '') if training.selected_model else ''
        if selected_model:
            exp_content += f"        self.pretrained_ckpt = r'{yolox_base_dir}/pretrained/{selected_model}.pth'\n"

    # Format arrays and scalars as Python source literals
    def format_value(val):
        if isinstance(val, (list, tuple)):
            # Convert float values to int for size-related parameters
            formatted_items = []
            for item in val:
                # Convert to int if it's a whole-number float
                if isinstance(item, float) and item.is_integer():
                    formatted_items.append(str(int(item)))
                else:
                    formatted_items.append(str(item))
            return '(' + ', '.join(formatted_items) + ')'
        elif isinstance(val, bool):
            return str(val)
        elif isinstance(val, str):
            return f'"{val}"'
        elif isinstance(val, float) and val.is_integer():
            # Convert whole-number floats to ints
            return str(int(val))
        else:
            return str(val)
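    # Hypothetical examples: format_value((640.0, 640.0)) -> '(640, 640)',
    # format_value(True) -> 'True', format_value('silu') -> '"silu"',
    # format_value(0.05) -> '0.05'.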

    # Add all config parameters to the exp
    for key, value in config.items():
        if key != 'exp_name':  # exp_name is handled separately
            exp_content += f"        self.{key} = {format_value(value)}\n"

    # Add exp_name (uses a dynamic path); appended before the method overrides
    # so that the assignment lands inside __init__ in the generated file
    exp_content += '''        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
'''

    # Add get_dataset override using the name parameter for the image directory
    exp_content += '''
    def get_dataset(self, cache=False, cache_type="ram"):
        """Override to use name parameter for images directory"""
        from yolox.data import COCODataset

        # COCODataset constructs image paths as: os.path.join(data_dir, name, file_name)
        # YOLOX adds "annotations/" to data_dir automatically, so we pass annotations_dir directly
        # Use empty string for name since we have absolute paths in JSON
        return COCODataset(
            data_dir=self.annotations_dir,
            json_file=self.train_ann,
            name="",
            img_size=self.input_size,
            preproc=self.preproc if hasattr(self, 'preproc') else None,
            cache=cache,
            cache_type=cache_type,
        )

    def get_eval_dataset(self, **kwargs):
        """Override eval dataset using name parameter"""
        from yolox.data import COCODataset

        testdev = kwargs.get("testdev", False)
        legacy = kwargs.get("legacy", False)

        return COCODataset(
            data_dir=self.annotations_dir,
            json_file=self.val_ann if not testdev else self.test_ann,
            name="",
            img_size=self.test_size,
            preproc=None,  # No preprocessing for evaluation
        )
'''

    return exp_content


def save_yolox_inference_exp(training_id, out_path, options=None):
    """Save inference exp.py to a custom path"""
    exp_content = generate_yolox_inference_exp(training_id, options, use_base_config=False)
    with open(out_path, 'w') as f:
        f.write(exp_content)
    return out_path

@@ -1,48 +1,48 @@
const Training = require('../models/training.js');
const fs = require('fs');
const path = require('path');

async function pushYoloxExpToDb(settings) {
    // Normalize boolean and array fields for DB
    const normalized = { ...settings };
    // Map 'act' from frontend to 'activation' for DB
    if (normalized.act !== undefined) {
        normalized.activation = normalized.act;
        delete normalized.act;
    }
    // Convert 'on'/'off' to boolean for save_history_ckpt
    if (typeof normalized.save_history_ckpt === 'string') {
        normalized.save_history_ckpt = normalized.save_history_ckpt === 'on';
    }
    // Convert comma-separated strings to arrays for input_size, test_size, mosaic_scale, mixup_scale
    ['input_size', 'test_size', 'mosaic_scale', 'mixup_scale'].forEach(key => {
        if (typeof normalized[key] === 'string') {
            const arr = normalized[key].split(',').map(v => parseFloat(v.trim()));
            normalized[key] = arr.length === 1 ? arr[0] : arr;
        }
    });
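    // Hypothetical example: '640,640' becomes [640, 640], while a single value
    // like '2' collapses to the scalar 2.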
    // Find TrainingProjectDetails for this project
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findOne({ where: { project_id: normalized.project_id } });
    if (!details) throw new Error('TrainingProjectDetails not found for project_id ' + normalized.project_id);
    normalized.project_details_id = details.id;
    // Create DB row
    const training = await Training.create(normalized);
    return training;
}

async function generateYoloxExpFromDb(trainingId) {
    // Fetch training row from DB
    const training = await Training.findByPk(trainingId);
    if (!training) throw new Error('Training not found');
    // Render JS booleans as Python literals (True/False) so the generated file parses
    const pyBool = (v) => (v ? 'True' : 'False');
    // Template for exp.py
    const expTemplate = `#!/usr/bin/env python3\n# Copyright (c) Megvii Inc. All rights reserved.\n\nimport os\nimport random\n\nimport torch\nimport torch.distributed as dist\nimport torch.nn as nn\n\nfrom .base_exp import BaseExp\n\n__all__ = [\"Exp\", \"check_exp_value\"]\n\nclass Exp(BaseExp):\n    def __init__(self):\n        super().__init__()\n\n        # ---------------- model config ---------------- #\n        self.num_classes = ${training.num_classes || 80}\n        self.depth = ${training.depth || 1.00}\n        self.width = ${training.width || 1.00}\n        self.act = \"${training.activation || training.act || 'silu'}\"\n\n        # ---------------- dataloader config ---------------- #\n        self.data_num_workers = ${training.data_num_workers || 4}\n        self.input_size = (${Array.isArray(training.input_size) ? training.input_size.join(', ') : '640, 640'})\n        self.multiscale_range = ${training.multiscale_range || 5}\n        self.data_dir = ${training.data_dir ? `\"${training.data_dir}\"` : 'None'}\n        self.train_ann = \"${training.train_ann || 'instances_train2017.json'}\"\n        self.val_ann = \"${training.val_ann || 'instances_val2017.json'}\"\n        self.test_ann = \"${training.test_ann || 'instances_test2017.json'}\"\n\n        # --------------- transform config ----------------- #\n        self.mosaic_prob = ${training.mosaic_prob !== undefined ? training.mosaic_prob : 1.0}\n        self.mixup_prob = ${training.mixup_prob !== undefined ? training.mixup_prob : 1.0}\n        self.hsv_prob = ${training.hsv_prob !== undefined ? training.hsv_prob : 1.0}\n        self.flip_prob = ${training.flip_prob !== undefined ? training.flip_prob : 0.5}\n        self.degrees = ${training.degrees !== undefined ? training.degrees : 10.0}\n        self.translate = ${training.translate !== undefined ? training.translate : 0.1}\n        self.mosaic_scale = (${Array.isArray(training.mosaic_scale) ? training.mosaic_scale.join(', ') : '0.1, 2'})\n        self.enable_mixup = ${pyBool(training.enable_mixup !== undefined ? training.enable_mixup : true)}\n        self.mixup_scale = (${Array.isArray(training.mixup_scale) ? training.mixup_scale.join(', ') : '0.5, 1.5'})\n        self.shear = ${training.shear !== undefined ? training.shear : 2.0}\n\n        # -------------- training config --------------------- #\n        self.warmup_epochs = ${training.warmup_epochs !== undefined ? training.warmup_epochs : 5}\n        self.max_epoch = ${training.max_epoch !== undefined ? training.max_epoch : 300}\n        self.warmup_lr = ${training.warmup_lr !== undefined ? training.warmup_lr : 0}\n        self.min_lr_ratio = ${training.min_lr_ratio !== undefined ? training.min_lr_ratio : 0.05}\n        self.basic_lr_per_img = ${training.basic_lr_per_img !== undefined ? training.basic_lr_per_img : 0.01 / 64.0}\n        self.scheduler = \"${training.scheduler || 'yoloxwarmcos'}\"\n        self.no_aug_epochs = ${training.no_aug_epochs !== undefined ? training.no_aug_epochs : 15}\n        self.ema = ${pyBool(training.ema !== undefined ? training.ema : true)}\n        self.weight_decay = ${training.weight_decay !== undefined ? training.weight_decay : 5e-4}\n        self.momentum = ${training.momentum !== undefined ? training.momentum : 0.9}\n        self.print_interval = ${training.print_interval !== undefined ? training.print_interval : 10}\n        self.eval_interval = ${training.eval_interval !== undefined ? training.eval_interval : 10}\n        self.save_history_ckpt = ${pyBool(training.save_history_ckpt !== undefined ? training.save_history_ckpt : true)}\n        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(\".\")[0]\n\n        # ----------------- testing config ------------------ #\n        self.test_size = (${Array.isArray(training.test_size) ? training.test_size.join(', ') : '640, 640'})\n        self.test_conf = ${training.test_conf !== undefined ? training.test_conf : 0.01}\n        self.nmsthre = ${training.nmsthre !== undefined ? training.nmsthre : 0.65}\n\n        # ... rest of the template ...\n\ndef check_exp_value(exp: Exp):\n    h, w = exp.input_size\n    assert h % 32 == 0 and w % 32 == 0, \"input size must be multiples of 32\"\n`;
    // Save to file in output directory
    const outDir = path.join(__dirname, '../../', training.project_id ? `project_${training.project_id}/${trainingId}` : 'exp_files');
    if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
    const filePath = path.join(outDir, 'exp.py');
    fs.writeFileSync(filePath, expTemplate);
    return filePath;
}

const Training = require('../models/training.js');
const fs = require('fs');
const path = require('path');

async function pushYoloxExpToDb(settings) {
    // Normalize boolean and array fields for DB
    const normalized = { ...settings };
    // Map 'act' from frontend to 'activation' for DB
    if (normalized.act !== undefined) {
        normalized.activation = normalized.act;
        delete normalized.act;
    }
    // Convert 'on'/'off' to boolean for save_history_ckpt
    if (typeof normalized.save_history_ckpt === 'string') {
        normalized.save_history_ckpt = normalized.save_history_ckpt === 'on';
    }
    // Convert comma-separated strings to arrays for input_size, test_size, mosaic_scale, mixup_scale
    ['input_size', 'test_size', 'mosaic_scale', 'mixup_scale'].forEach(key => {
        if (typeof normalized[key] === 'string') {
            const arr = normalized[key].split(',').map(v => parseFloat(v.trim()));
            normalized[key] = arr.length === 1 ? arr[0] : arr;
        }
    });
    // Find TrainingProjectDetails for this project
    const TrainingProjectDetails = require('../models/TrainingProjectDetails.js');
    const details = await TrainingProjectDetails.findOne({ where: { project_id: normalized.project_id } });
    if (!details) throw new Error('TrainingProjectDetails not found for project_id ' + normalized.project_id);
    normalized.project_details_id = details.id;
    // Create DB row
    const training = await Training.create(normalized);
    return training;
}

async function generateYoloxExpFromDb(trainingId) {
    // Fetch training row from DB
    const training = await Training.findByPk(trainingId);
    if (!training) throw new Error('Training not found');
    // Render JS booleans as Python literals (True/False) so the generated file parses
    const pyBool = (v) => (v ? 'True' : 'False');
    // Template for exp.py
    const expTemplate = `#!/usr/bin/env python3\n# Copyright (c) Megvii Inc. All rights reserved.\n\nimport os\nimport random\n\nimport torch\nimport torch.distributed as dist\nimport torch.nn as nn\n\nfrom .base_exp import BaseExp\n\n__all__ = [\"Exp\", \"check_exp_value\"]\n\nclass Exp(BaseExp):\n    def __init__(self):\n        super().__init__()\n\n        # ---------------- model config ---------------- #\n        self.num_classes = ${training.num_classes || 80}\n        self.depth = ${training.depth || 1.00}\n        self.width = ${training.width || 1.00}\n        self.act = \"${training.activation || training.act || 'silu'}\"\n\n        # ---------------- dataloader config ---------------- #\n        self.data_num_workers = ${training.data_num_workers || 4}\n        self.input_size = (${Array.isArray(training.input_size) ? training.input_size.join(', ') : '640, 640'})\n        self.multiscale_range = ${training.multiscale_range || 5}\n        self.data_dir = ${training.data_dir ? `\"${training.data_dir}\"` : 'None'}\n        self.train_ann = \"${training.train_ann || 'instances_train2017.json'}\"\n        self.val_ann = \"${training.val_ann || 'instances_val2017.json'}\"\n        self.test_ann = \"${training.test_ann || 'instances_test2017.json'}\"\n\n        # --------------- transform config ----------------- #\n        self.mosaic_prob = ${training.mosaic_prob !== undefined ? training.mosaic_prob : 1.0}\n        self.mixup_prob = ${training.mixup_prob !== undefined ? training.mixup_prob : 1.0}\n        self.hsv_prob = ${training.hsv_prob !== undefined ? training.hsv_prob : 1.0}\n        self.flip_prob = ${training.flip_prob !== undefined ? training.flip_prob : 0.5}\n        self.degrees = ${training.degrees !== undefined ? training.degrees : 10.0}\n        self.translate = ${training.translate !== undefined ? training.translate : 0.1}\n        self.mosaic_scale = (${Array.isArray(training.mosaic_scale) ? training.mosaic_scale.join(', ') : '0.1, 2'})\n        self.enable_mixup = ${pyBool(training.enable_mixup !== undefined ? training.enable_mixup : true)}\n        self.mixup_scale = (${Array.isArray(training.mixup_scale) ? training.mixup_scale.join(', ') : '0.5, 1.5'})\n        self.shear = ${training.shear !== undefined ? training.shear : 2.0}\n\n        # -------------- training config --------------------- #\n        self.warmup_epochs = ${training.warmup_epochs !== undefined ? training.warmup_epochs : 5}\n        self.max_epoch = ${training.max_epoch !== undefined ? training.max_epoch : 300}\n        self.warmup_lr = ${training.warmup_lr !== undefined ? training.warmup_lr : 0}\n        self.min_lr_ratio = ${training.min_lr_ratio !== undefined ? training.min_lr_ratio : 0.05}\n        self.basic_lr_per_img = ${training.basic_lr_per_img !== undefined ? training.basic_lr_per_img : 0.01 / 64.0}\n        self.scheduler = \"${training.scheduler || 'yoloxwarmcos'}\"\n        self.no_aug_epochs = ${training.no_aug_epochs !== undefined ? training.no_aug_epochs : 15}\n        self.ema = ${pyBool(training.ema !== undefined ? training.ema : true)}\n        self.weight_decay = ${training.weight_decay !== undefined ? training.weight_decay : 5e-4}\n        self.momentum = ${training.momentum !== undefined ? training.momentum : 0.9}\n        self.print_interval = ${training.print_interval !== undefined ? training.print_interval : 10}\n        self.eval_interval = ${training.eval_interval !== undefined ? training.eval_interval : 10}\n        self.save_history_ckpt = ${pyBool(training.save_history_ckpt !== undefined ? training.save_history_ckpt : true)}\n        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(\".\")[0]\n\n        # ----------------- testing config ------------------ #\n        self.test_size = (${Array.isArray(training.test_size) ? training.test_size.join(', ') : '640, 640'})\n        self.test_conf = ${training.test_conf !== undefined ? training.test_conf : 0.01}\n        self.nmsthre = ${training.nmsthre !== undefined ? training.nmsthre : 0.65}\n\n        # ... rest of the template ...\n\ndef check_exp_value(exp: Exp):\n    h, w = exp.input_size\n    assert h % 32 == 0 and w % 32 == 0, \"input size must be multiples of 32\"\n`;
    // Save to file in output directory
    const outDir = path.join(__dirname, '../../', training.project_id ? `project_${training.project_id}/${trainingId}` : 'exp_files');
    if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
    const filePath = path.join(outDir, 'exp.py');
    fs.writeFileSync(filePath, expTemplate);
    return filePath;
}

module.exports = { pushYoloxExpToDb, generateYoloxExpFromDb };

@@ -1,92 +1,113 @@
from models.training import Training
from models.TrainingProjectDetails import TrainingProjectDetails
from database.database import db


def push_yolox_exp_to_db(settings):
    """Save YOLOX settings to database"""
    normalized = dict(settings)

    # Map common frontend aliases to DB column names
    alias_map = {
        'act': 'activation',
        'nmsthre': 'nms_thre',
        'select_model': 'selected_model'
    }
    for a, b in alias_map.items():
        if a in normalized and b not in normalized:
            normalized[b] = normalized.pop(a)

    # Convert 'on'/'off' or 'true'/'false' strings to boolean for known boolean fields
    for bool_field in ['save_history_ckpt', 'ema', 'enable_mixup']:
        if bool_field in normalized:
            val = normalized[bool_field]
            if isinstance(val, str):
                normalized[bool_field] = val.lower() in ('1', 'true', 'on')
            else:
                normalized[bool_field] = bool(val)

    # Convert comma-separated strings to arrays for JSON fields
    for key in ['input_size', 'test_size', 'mosaic_scale', 'mixup_scale']:
        if key in normalized and isinstance(normalized[key], str):
            parts = [p.strip() for p in normalized[key].split(',') if p.strip()]
            try:
                arr = [float(p) for p in parts]
            except Exception:
                arr = parts
            normalized[key] = arr[0] if len(arr) == 1 else arr

    # Ensure we have a TrainingProjectDetails row for project_id
    project_id = normalized.get('project_id')
    if not project_id:
        raise Exception('Missing project_id in settings')
    details = TrainingProjectDetails.query.filter_by(project_id=project_id).first()
    if not details:
        raise Exception(f'TrainingProjectDetails not found for project_id {project_id}')
    normalized['project_details_id'] = details.id

    # Filter normalized to only columns that exist on the Training model
    valid_cols = {c.name: c for c in Training.__table__.columns}
    filtered = {}
    for k, v in normalized.items():
        if k in valid_cols:
            col_type = valid_cols[k].type.__class__.__name__
            # Try to coerce types for numeric/boolean columns
            try:
                if 'Integer' in col_type:
                    if v is None or v == '':
                        filtered[k] = None
                    else:
                        filtered[k] = int(float(v))
                elif 'Float' in col_type:
                    if v is None or v == '':
                        filtered[k] = None
                    else:
                        filtered[k] = float(v)
                elif 'Boolean' in col_type:
                    if isinstance(v, str):
                        filtered[k] = v.lower() in ('1', 'true', 'on')
                    else:
                        filtered[k] = bool(v)
                elif 'JSON' in col_type:
                    filtered[k] = v
                elif 'LargeBinary' in col_type:
                    # If a file path was passed, store its bytes; otherwise store raw bytes
                    if isinstance(v, str):
                        try:
                            filtered[k] = v.encode('utf-8')
                        except Exception:
                            filtered[k] = None
                    else:
                        filtered[k] = v
                else:
                    filtered[k] = v
            except Exception:
                # If conversion fails, just assign the raw value
                filtered[k] = v

    # Create DB row
    training = Training(**filtered)
    db.session.add(training)
    db.session.commit()

    return training

from models.training import Training
from models.TrainingProjectDetails import TrainingProjectDetails
from models.TrainingSize import TrainingSize
from database.database import db


def push_yolox_exp_to_db(settings):
    """Save YOLOX settings to database"""
    normalized = dict(settings)

    # Map common frontend aliases to DB column names
    alias_map = {
        'act': 'activation',
        'nmsthre': 'nms_thre',
        'select_model': 'selected_model'
    }
    for a, b in alias_map.items():
        if a in normalized and b not in normalized:
            normalized[b] = normalized.pop(a)

    # Convert 'on'/'off' or 'true'/'false' strings to boolean for known boolean fields
    for bool_field in ['save_history_ckpt', 'ema', 'enable_mixup']:
        if bool_field in normalized:
            val = normalized[bool_field]
            if isinstance(val, str):
                normalized[bool_field] = val.lower() in ('1', 'true', 'on')
            else:
                normalized[bool_field] = bool(val)

    # Extract size arrays for the separate TrainingSize table (3NF)
    size_arrays = {}
    for key in ['input_size', 'test_size', 'mosaic_scale', 'mixup_scale']:
        if key in normalized:
            if isinstance(normalized[key], str):
                parts = [p.strip() for p in normalized[key].split(',') if p.strip()]
                try:
                    arr = [float(p) for p in parts]
                except Exception:
                    arr = parts
                # Always keep a list here so single values are persisted too
                size_arrays[key] = arr
            elif isinstance(normalized[key], list):
                size_arrays[key] = normalized[key]
            elif normalized[key] is not None:
                size_arrays[key] = [float(normalized[key])]
            # Remove from normalized dict since it won't be stored in the training table
            del normalized[key]
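            # Hypothetical example: normalized['input_size'] == '640,640' yields
            # size_arrays['input_size'] == [640.0, 640.0] and removes the raw key.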

    # Ensure we have a TrainingProjectDetails row for project_id
    project_id = normalized.get('project_id')
    if not project_id:
        raise Exception('Missing project_id in settings')
    details = TrainingProjectDetails.query.filter_by(project_id=project_id).first()
    if not details:
        raise Exception(f'TrainingProjectDetails not found for project_id {project_id}')
    normalized['project_details_id'] = details.id

    # Filter normalized to only columns that exist on the Training model
    valid_cols = {c.name: c for c in Training.__table__.columns}
    filtered = {}
    for k, v in normalized.items():
        if k in valid_cols:
            col_type = valid_cols[k].type.__class__.__name__
            # Try to coerce types for numeric/boolean columns
            try:
                if 'Integer' in col_type:
                    if v is None or v == '':
                        filtered[k] = None
                    else:
                        filtered[k] = int(float(v))
                elif 'Float' in col_type:
                    if v is None or v == '':
                        filtered[k] = None
                    else:
                        filtered[k] = float(v)
                elif 'Boolean' in col_type:
                    if isinstance(v, str):
                        filtered[k] = v.lower() in ('1', 'true', 'on')
                    else:
                        filtered[k] = bool(v)
                elif 'LargeBinary' in col_type:
                    # If a file path was passed, store its bytes; otherwise store raw bytes
                    if isinstance(v, str):
                        try:
                            filtered[k] = v.encode('utf-8')
                        except Exception:
                            filtered[k] = None
                    else:
                        filtered[k] = v
                else:
                    filtered[k] = v
            except Exception:
                # If conversion fails, just assign the raw value
                filtered[k] = v

    # Create DB row
    training = Training(**filtered)
    db.session.add(training)
    db.session.flush()  # Get training.id

    # Save size arrays to the TrainingSize table (3NF)
    for size_type, values in size_arrays.items():
        if values and isinstance(values, list):
            for order, value in enumerate(values):
                size_record = TrainingSize(
                    training_id=training.id,
                    size_type=size_type,
                    value_order=order,
                    value=float(value)
                )
                db.session.add(size_record)

    db.session.commit()

    return training
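
# Hypothetical usage sketch: push_yolox_exp_to_db({'project_id': 3,
# 'input_size': '640,640', 'act': 'silu', 'max_epoch': '50'}) creates one
# Training row plus two TrainingSize rows (value_order 0 and 1) for input_size.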
@@ -1,120 +1,120 @@
const sequelize = require('../database/database.js');
const { Project, Img, Ann } = require('../models');
const { fetchLableStudioProject, fetchProjectIdsAndTitles } = require('./fetch-labelstudio.js');

const updateStatus = { running: false };

async function seedLabelStudio() {
    updateStatus.running = true;
    console.log('Seeding started');
    try {
        await sequelize.sync();
        const projects = await fetchProjectIdsAndTitles();

        for (const project of projects) {
            console.log(`Processing project ${project.id} (${project.title})`);

            // Upsert project in DB
            await Project.upsert({ project_id: project.id, title: project.title });

            // Fetch project data (annotations array)
            const data = await fetchLableStudioProject(project.id);
            if (!Array.isArray(data) || data.length === 0) {
                console.log(`No annotation data for project ${project.id}`);
                continue;
            }

            // Remove old images and annotations for this project
            const oldImages = await Img.findAll({ where: { project_id: project.id } });
            const oldImageIds = oldImages.map(img => img.image_id);
            if (oldImageIds.length > 0) {
                await Ann.destroy({ where: { image_id: oldImageIds } });
                await Img.destroy({ where: { project_id: project.id } });
                console.log(`Deleted ${oldImageIds.length} old images and their annotations for project ${project.id}`);
            }

            // Prepare arrays
            const imagesBulk = [];
            const annsBulk = [];

            for (const ann of data) {
                // Extract width/height
                let width = null;
                let height = null;
                if (Array.isArray(ann.label_rectangles) && ann.label_rectangles.length > 0) {
                    width = ann.label_rectangles[0].original_width;
                    height = ann.label_rectangles[0].original_height;
                } else if (Array.isArray(ann.label) && ann.label.length > 0 && ann.label[0].original_width && ann.label[0].original_height) {
                    width = ann.label[0].original_width;
                    height = ann.label[0].original_height;
                }

                // Only push image and annotations if width and height are valid
                if (width && height) {
                    imagesBulk.push({
                        project_id: project.id,
                        image_path: ann.image,
                        width,
                        height
                    });

                    // Handle multiple annotations per image
                    if (Array.isArray(ann.label_rectangles)) {
                        for (const ann_detail of ann.label_rectangles) {
                            annsBulk.push({
                                image_path: ann.image,
                                x: (ann_detail.x * width) / 100,
                                y: (ann_detail.y * height) / 100,
                                width: (ann_detail.width * width) / 100,
                                height: (ann_detail.height * height) / 100,
                                Label: Array.isArray(ann_detail.rectanglelabels) ? (ann_detail.rectanglelabels[0] || 'unknown') : (ann_detail.rectanglelabels || 'unknown')
                            });
                        }
                    } else if (Array.isArray(ann.label)) {
                        for (const ann_detail of ann.label) {
                            annsBulk.push({
                                image_path: ann.image,
                                x: (ann_detail.x * width) / 100,
                                y: (ann_detail.y * height) / 100,
                                width: (ann_detail.width * width) / 100,
                                height: (ann_detail.height * height) / 100,
                                Label: Array.isArray(ann_detail.rectanglelabels) ? (ann_detail.rectanglelabels[0] || 'unknown') : (ann_detail.rectanglelabels || 'unknown')
                            });
                        }
                    }
                }
            }

            // 1) Insert images and get generated IDs
            const insertedImages = await Img.bulkCreate(imagesBulk, { returning: true });

            // 2) Map image_path -> image_id
            const imageMap = {};
            for (const img of insertedImages) {
                imageMap[img.image_path] = img.image_id;
            }

            // 3) Assign correct image_id to each annotation
            for (const ann of annsBulk) {
                ann.image_id = imageMap[ann.image_path];
                delete ann.image_path; // cleanup
            }

            // 4) Insert annotations
            await Ann.bulkCreate(annsBulk);

            console.log(`Inserted ${imagesBulk.length} images and ${annsBulk.length} annotations for project ${project.id}`);
        }

        console.log('Seeding done');
        return { success: true, message: 'Data inserted successfully!' };
    } catch (error) {
        console.error('Error inserting data:', error);
        return { success: false, message: error.message };
    } finally {
        updateStatus.running = false;
        console.log('updateStatus.running set to false');
    }
}

module.exports = { seedLabelStudio, updateStatus };

@@ -1,149 +1,149 @@
from database.database import db
from models.LabelStudioProject import LabelStudioProject
from models.Images import Image
from models.Annotation import Annotation
from services.fetch_labelstudio import fetch_label_studio_project, fetch_project_ids_and_titles

update_status = {"running": False}


def seed_label_studio():
    """Seed database with Label Studio project data"""
    update_status["running"] = True
    print('Seeding started')

    try:
        projects = fetch_project_ids_and_titles()

        for project in projects:
            print(f"Processing project {project['id']} ({project['title']})")

            # Upsert project in DB
            existing_project = LabelStudioProject.query.filter_by(project_id=project['id']).first()
            if existing_project:
                existing_project.title = project['title']
            else:
                new_project = LabelStudioProject(project_id=project['id'], title=project['title'])
                db.session.add(new_project)
            db.session.commit()

            # Fetch project data (annotations array)
            data = fetch_label_studio_project(project['id'])
            if not isinstance(data, list) or len(data) == 0:
                print(f"No annotation data for project {project['id']}")
                continue

            # Remove old images and annotations for this project
            old_images = Image.query.filter_by(project_id=project['id']).all()
            old_image_ids = [img.image_id for img in old_images]

            if old_image_ids:
                Annotation.query.filter(Annotation.image_id.in_(old_image_ids)).delete(synchronize_session=False)
                Image.query.filter_by(project_id=project['id']).delete()
                db.session.commit()
                print(f"Deleted {len(old_image_ids)} old images and their annotations for project {project['id']}")

            # Prepare arrays
            images_bulk = []
            anns_bulk = []

            for ann in data:
                # Extract width/height
                width = None
                height = None

                if isinstance(ann.get('label_rectangles'), list) and len(ann['label_rectangles']) > 0:
                    width = ann['label_rectangles'][0].get('original_width')
                    height = ann['label_rectangles'][0].get('original_height')
                elif isinstance(ann.get('label'), list) and len(ann['label']) > 0:
                    if ann['label'][0].get('original_width') and ann['label'][0].get('original_height'):
                        width = ann['label'][0]['original_width']
                        height = ann['label'][0]['original_height']

                # Only process if width and height are valid
                if width and height:
                    image_data = {
                        'project_id': project['id'],
                        'image_path': ann.get('image'),
                        'width': width,
                        'height': height
                    }
                    images_bulk.append(image_data)

                    # Handle multiple annotations per image
                    if isinstance(ann.get('label_rectangles'), list):
                        for ann_detail in ann['label_rectangles']:
                            # Get label safely
                            rectanglelabels = ann_detail.get('rectanglelabels', [])
                            if isinstance(rectanglelabels, list) and len(rectanglelabels) > 0:
                                label = rectanglelabels[0]
                            elif isinstance(rectanglelabels, str):
                                label = rectanglelabels
                            else:
                                label = 'unknown'

                            ann_data = {
                                'image_path': ann.get('image'),
                                'x': (ann_detail['x'] * width) / 100,
                                'y': (ann_detail['y'] * height) / 100,
                                'width': (ann_detail['width'] * width) / 100,
                                'height': (ann_detail['height'] * height) / 100,
                                'Label': label
                            }
                            anns_bulk.append(ann_data)
                    elif isinstance(ann.get('label'), list):
                        for ann_detail in ann['label']:
                            # Get label safely
                            rectanglelabels = ann_detail.get('rectanglelabels', [])
                            if isinstance(rectanglelabels, list) and len(rectanglelabels) > 0:
                                label = rectanglelabels[0]
                            elif isinstance(rectanglelabels, str):
                                label = rectanglelabels
                            else:
                                label = 'unknown'

                            ann_data = {
                                'image_path': ann.get('image'),
                                'x': (ann_detail['x'] * width) / 100,
                                'y': (ann_detail['y'] * height) / 100,
                                'width': (ann_detail['width'] * width) / 100,
                                'height': (ann_detail['height'] * height) / 100,
                                'Label': label
                            }
                            anns_bulk.append(ann_data)

            # Insert images and get generated IDs
            inserted_images = []
            for img_data in images_bulk:
                new_image = Image(**img_data)
                db.session.add(new_image)
                db.session.flush()  # Flush to get the ID
                inserted_images.append(new_image)
            db.session.commit()

            # Map image_path -> image_id
            image_map = {img.image_path: img.image_id for img in inserted_images}

            # Assign correct image_id to each annotation
            for ann_data in anns_bulk:
                ann_data['image_id'] = image_map.get(ann_data['image_path'])
                del ann_data['image_path']

            # Insert annotations
            for ann_data in anns_bulk:
                new_annotation = Annotation(**ann_data)
                db.session.add(new_annotation)
            db.session.commit()

            print(f"Inserted {len(images_bulk)} images and {len(anns_bulk)} annotations for project {project['id']}")

        print('Seeding done')
        return {'success': True, 'message': 'Data inserted successfully!'}

    except Exception as error:
        print(f'Error inserting data: {error}')
        db.session.rollback()
        return {'success': False, 'message': str(error)}

    finally:
        update_status["running"] = False
        print('update_status["running"] set to False')

from database.database import db
from models.LabelStudioProject import LabelStudioProject
from models.Images import Image
from models.Annotation import Annotation
from services.fetch_labelstudio import fetch_label_studio_project, fetch_project_ids_and_titles

update_status = {"running": False}


def seed_label_studio():
    """Seed database with Label Studio project data"""
    update_status["running"] = True
    print('Seeding started')

    try:
        projects = fetch_project_ids_and_titles()

        for project in projects:
            print(f"Processing project {project['id']} ({project['title']})")

            # Upsert project in DB
            existing_project = LabelStudioProject.query.filter_by(project_id=project['id']).first()
            if existing_project:
                existing_project.title = project['title']
            else:
                new_project = LabelStudioProject(project_id=project['id'], title=project['title'])
                db.session.add(new_project)
            db.session.commit()

            # Fetch project data (annotations array)
            data = fetch_label_studio_project(project['id'])
            if not isinstance(data, list) or len(data) == 0:
                print(f"No annotation data for project {project['id']}")
                continue

            # Remove old images and annotations for this project
            old_images = Image.query.filter_by(project_id=project['id']).all()
            old_image_ids = [img.image_id for img in old_images]

            if old_image_ids:
                Annotation.query.filter(Annotation.image_id.in_(old_image_ids)).delete(synchronize_session=False)
                Image.query.filter_by(project_id=project['id']).delete()
                db.session.commit()
                print(f"Deleted {len(old_image_ids)} old images and their annotations for project {project['id']}")

            # Prepare arrays
            images_bulk = []
            anns_bulk = []

            for ann in data:
                # Extract width/height
                width = None
                height = None

                if isinstance(ann.get('label_rectangles'), list) and len(ann['label_rectangles']) > 0:
                    width = ann['label_rectangles'][0].get('original_width')
                    height = ann['label_rectangles'][0].get('original_height')
                elif isinstance(ann.get('label'), list) and len(ann['label']) > 0:
                    if ann['label'][0].get('original_width') and ann['label'][0].get('original_height'):
                        width = ann['label'][0]['original_width']
                        height = ann['label'][0]['original_height']

                # Only process if width and height are valid
                if width and height:
                    image_data = {
                        'project_id': project['id'],
                        'image_path': ann.get('image'),
                        'width': int(width),  # Ensure integer
                        'height': int(height)  # Ensure integer
                    }
                    images_bulk.append(image_data)

                    # Handle multiple annotations per image
                    if isinstance(ann.get('label_rectangles'), list):
                        for ann_detail in ann['label_rectangles']:
                            # Get label safely
                            rectanglelabels = ann_detail.get('rectanglelabels', [])
                            if isinstance(rectanglelabels, list) and len(rectanglelabels) > 0:
                                label = rectanglelabels[0]
                            elif isinstance(rectanglelabels, str):
                                label = rectanglelabels
                            else:
                                label = 'unknown'

                            ann_data = {
                                'image_path': ann.get('image'),
                                'x': (ann_detail['x'] * width) / 100,
                                'y': (ann_detail['y'] * height) / 100,
                                'width': (ann_detail['width'] * width) / 100,
                                'height': (ann_detail['height'] * height) / 100,
                                'Label': label
                            }
                            anns_bulk.append(ann_data)
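                            # Hypothetical example: x = 25 (percent) on a 1280 px wide
                            # image becomes 25 * 1280 / 100 = 320 px; Label Studio stores
                            # box coordinates as percentages of the image size.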
                    elif isinstance(ann.get('label'), list):
                        for ann_detail in ann['label']:
                            # Get label safely
                            rectanglelabels = ann_detail.get('rectanglelabels', [])
                            if isinstance(rectanglelabels, list) and len(rectanglelabels) > 0:
                                label = rectanglelabels[0]
                            elif isinstance(rectanglelabels, str):
                                label = rectanglelabels
                            else:
                                label = 'unknown'

                            ann_data = {
                                'image_path': ann.get('image'),
                                'x': (ann_detail['x'] * width) / 100,
                                'y': (ann_detail['y'] * height) / 100,
                                'width': (ann_detail['width'] * width) / 100,
                                'height': (ann_detail['height'] * height) / 100,
                                'Label': label
                            }
                            anns_bulk.append(ann_data)

            # Insert images and get generated IDs
            inserted_images = []
            for img_data in images_bulk:
                new_image = Image(**img_data)
                db.session.add(new_image)
                db.session.flush()  # Flush to get the ID
                inserted_images.append(new_image)
            db.session.commit()

            # Map image_path -> image_id
            image_map = {img.image_path: img.image_id for img in inserted_images}

            # Assign correct image_id to each annotation
            for ann_data in anns_bulk:
                ann_data['image_id'] = image_map.get(ann_data['image_path'])
                del ann_data['image_path']

            # Insert annotations
            for ann_data in anns_bulk:
                new_annotation = Annotation(**ann_data)
                db.session.add(new_annotation)
            db.session.commit()

            print(f"Inserted {len(images_bulk)} images and {len(anns_bulk)} annotations for project {project['id']}")

        print('Seeding done')
        return {'success': True, 'message': 'Data inserted successfully!'}

    except Exception as error:
        print(f'Error inserting data: {error}')
        db.session.rollback()
        return {'success': False, 'message': str(error)}

    finally:
        update_status["running"] = False
        print('update_status["running"] set to False')

71
backend/services/settings_service.py
Normal file
@@ -0,0 +1,71 @@
"""
Settings Service - Manages global application settings
"""
from models.Settings import Settings
from database.database import db


def get_setting(key, default=None):
    """Get a setting value by key"""
    setting = Settings.query.filter_by(key=key).first()
    return setting.value if setting else default
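    # Hypothetical usage: get_setting('yolox_output_path', './backend') returns the
    # stored value when the key exists, otherwise the './backend' default.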


def set_setting(key, value, description=None):
    """Set a setting value"""
    setting = Settings.query.filter_by(key=key).first()
    if setting:
        setting.value = value
        if description:
            setting.description = description
    else:
        setting = Settings(key=key, value=value, description=description)
        db.session.add(setting)

    db.session.commit()
    return setting


def get_all_settings():
    """Get all settings as a dictionary"""
    settings = Settings.query.all()
    return {s.key: s.value for s in settings}


def get_all_settings_detailed():
    """Get all settings with full details"""
    settings = Settings.query.all()
    return [s.to_dict() for s in settings]


def initialize_default_settings():
    """Initialize default settings if they don't exist"""
    defaults = {
        'labelstudio_api_url': {
            'value': 'http://192.168.1.19:8080/api',
            'description': 'Label Studio API URL'
        },
        'labelstudio_api_token': {
            'value': 'c1cef980b7c73004f4ee880a42839313b863869f',
            'description': 'Label Studio API Token'
        },
        'yolox_path': {
            'value': 'C:/YOLOX',
            'description': 'Path to YOLOX installation directory'
        },
        'yolox_venv_path': {
            'value': '/home/kitraining/Yolox/yolox_venv/bin/activate',
            'description': 'Path to YOLOX virtual environment activation script'
        },
        'yolox_output_path': {
            'value': './backend',
            'description': 'Output folder for YOLOX experiment files and JSONs'
        },
        'yolox_data_dir': {
            'value': '/home/kitraining/To_Annotate/',
            'description': 'Data directory path for YOLOX training (where images are located)'
        }
    }

    for key, data in defaults.items():
        existing = Settings.query.filter_by(key=key).first()
        if not existing:
            setting = Settings(key=key, value=data['value'], description=data['description'])
            db.session.add(setting)

    db.session.commit()

@@ -1,14 +1,14 @@
#!/usr/bin/env python3
"""
Start the Flask backend server
"""
import sys
import os

# Add the backend directory to Python path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from app import app

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=3000, debug=True)
47
backend/test/7/exp.py
Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.

import os

from yolox.exp import Exp as MyExp


class Exp(MyExp):
    def __init__(self):
        super(Exp, self).__init__()
        self.data_dir = "/home/kitraining/To_Annotate/"
        self.train_ann = "coco_project_3_train.json"
        self.val_ann = "coco_project_3_valid.json"
        self.test_ann = "coco_project_3_test.json"
        self.num_classes = 2
        self.depth = 1.0
        self.width = 1.0
        self.input_size = (640.0, 640.0)
        self.mosaic_scale = (0.1, 2.0)
        self.test_size = (640.0, 640.0)
        self.enable_mixup = True
        self.max_epoch = 300
        self.warmup_epochs = 5
        self.warmup_lr = 0.0
        self.scheduler = "yoloxwarmcos"
        self.no_aug_epochs = 15
        self.min_lr_ratio = 0.05
        self.ema = True
        self.weight_decay = 0.0005
        self.momentum = 0.9
        self.print_interval = 10
        self.eval_interval = 10
        self.test_conf = 0.01
        self.nms_thre = 0.65
        self.mosaic_prob = 1.0
        self.mixup_prob = 1.0
        self.hsv_prob = 1.0
        self.flip_prob = 0.5
        self.degrees = 10.0
        self.translate = 0.1
        self.shear = 2.0
        self.mixup_scale = (0.5, 1.5)
        self.activation = "silu"
        self.random_size = (10, 20)
        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]