Compare commits

19 Commits
v0.01 ... main

Author SHA1 Message Date
c2bbbe03bc update repo controller 2026-01-07 12:32:31 +01:00
68f4f0b664 repo structure 2026-01-04 18:13:53 +01:00
3ec1c08e15 repo structure 2026-01-04 15:51:17 +01:00
a13b8ca858 repo structure 2026-01-04 15:33:24 +01:00
b8e412f4b1 repo structure 2026-01-04 15:29:33 +01:00
0e993afad5 fix path issue 2026-01-04 15:12:02 +01:00
2c43e719e3 update issues of freeze 2026-01-04 14:54:35 +01:00
bb80a65346 add docs 2025-12-28 04:55:02 +01:00
f7d58fb6c0 update DB table 2025-12-28 03:49:51 +01:00
1ae11b02ce update DB table 2025-12-28 03:46:11 +01:00
de08431e53 update DB table 2025-12-28 03:36:29 +01:00
393e78defc update DB table 2025-12-28 03:28:14 +01:00
b1bebde582 update DB table 2025-12-28 03:22:12 +01:00
69be5dfe52 update DB table 2025-12-28 03:15:12 +01:00
414042cb2b update DB table 2025-12-28 03:08:48 +01:00
db8ea58f6d update DB table 2025-12-28 00:47:17 +01:00
bc2ab5d799 update DB table 2025-12-28 00:43:37 +01:00
3f903f8fcc update DB table 2025-12-27 20:08:16 +01:00
58be2d600c continue implementation 2025-12-27 20:01:45 +01:00
22 changed files with 862 additions and 95 deletions

README.md (new file, 36 lines)

@@ -0,0 +1,36 @@
# TestArena Backend
TestArena is an automated build and test execution system for ESP32 projects. It provides a complete workflow for cloning repositories, building firmware, and running tests in a QEMU environment, all managed through a modern web dashboard.
## 🚀 Quick Start
1. **Deploy**: Run `sudo ./deploy.sh` on your Ubuntu server.
2. **Access**: Open `http://<server-ip>:8080/` in your browser.
3. **Monitor**: Use the dashboard to track test queues, view individual tasks, and check service health.
4. **Restart**: If services need a manual restart, use `sudo ./restart_services.sh`.
## 🛠️ Key Features
- **Service Robustness**: Systemd services are configured to auto-restart on failure and after reboot.
- **Monitoring Dashboard**: Real-time status of App and Worker services, plus detailed task tracking for each queue.
- **Task Timeouts**: Running tasks have a 1-hour timeout to prevent queue blocking.
- **Remote Management**: A dedicated restart script for easy remote execution via SSH.
## 📚 Documentation
For detailed information, please refer to the documentation in the `doc/` folder:
* **[Architecture & Design](doc/architecture.md)**: How the system is built.
* **[Usage Guide](doc/usage.md)**: How to use and manage the system.
* **[API Reference](doc/api_reference.md)**: Integration details.
* **[Workflows](doc/flow_diagrams.md)**: Visual flowcharts of key processes.
## 🛠️ Technology Stack
* **Backend**: FastAPI, SQLAlchemy, SQLite
* **Worker**: Python Subprocess, Bash
* **Frontend**: Vanilla HTML/JS (Glassmorphism UI)
* **Infrastructure**: Nginx, Systemd, ESP-IDF, QEMU
---
© 2025 TestArena Team

View File

@@ -1,14 +1,8 @@
 import os
 import sys
 import json
+import subprocess
 from scenario_exe_parser import parse_test_scenario
-import subprocess
-import os
-import sys
-import json
-import subprocess
-# Assuming parse_test_scenario is imported correctly
-# from scenario_exe_parser import parse_test_scenario
 
 # --- Global Paths ---
 current_directory = os.path.dirname(os.path.abspath(__file__))
@@ -72,20 +66,38 @@ REPORT_TEMPLATE = """
 def run_test_suite(tasks):
     aggregated_results = {}
-    shell_script = "./TPF/test_execution.sh"
-    shell_script = "/home/asf/testarena_backend/TPF/test_execution.sh"
+    # Use path relative to this script
+    script_dir = os.path.dirname(os.path.abspath(__file__))
+    shell_script = os.path.join(script_dir, "test_execution.sh")
 
     if os.name != 'nt':
         subprocess.run(["chmod", "+x", shell_script])
 
+    print("tasks:", tasks)
     for task in tasks:
         print(f"--- Starting Task: {task['id']} ---")
-        result = subprocess.run(
+        # Use Popen to stream output in real-time
+        env = os.environ.copy()
+        env["PYTHONUNBUFFERED"] = "1"
+        process = subprocess.Popen(
             [shell_script, task['id'], task['cmd'], task['path'], REPO_PATH],
-            capture_output=True, text=True
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            text=True,
+            bufsize=1,
+            universal_newlines=True,
+            env=env
         )
-        print(result.stdout)
+
+        full_output = ""
+        for line in process.stdout:
+            print(line, end="")
+            full_output += line
+        process.wait()
 
         json_found = False
-        for line in result.stdout.splitlines():
+        for line in full_output.splitlines():
             if line.startswith("FINAL_JSON_OUTPUT:"):
                 json_string = line.replace("FINAL_JSON_OUTPUT:", "").strip()
                 try:
@@ -97,6 +109,8 @@ def run_test_suite(tasks):
         if not json_found:
             aggregated_results[task['id']] = ["ERROR", "N/A"]
+        else:
+            print(f"--- Completed Task: {json_found} ---")
 
     return aggregated_results
 
 def generate_html_report(scenario_name, results, output_path):
@@ -130,13 +144,14 @@ def generate_html_report(scenario_name, results, output_path):
         f.write(report_content)
     print(f"HTML Report generated at: {report_file}")
 
-def save_summary(results, task_id_path):
+def save_summary(results):
     json_path = os.path.join(task_id_path, "final_summary.json")
     with open(json_path, "w") as f:
         json.dump(results, f, indent=4)
     print(f"\nFinal results saved to {json_path}")
 
 if __name__ == "__main__":
+    exit_code = 0
     if len(sys.argv) > 3:
         queue_id = sys.argv[1]  # "1234"
         scenario_path = sys.argv[2]  # "application_layer/business_stack/actuator_manager/test/actuator_manager_init_test.test_scenario.xml"
@@ -162,9 +177,20 @@ if __name__ == "__main__":
                 "cmd": exec_cmd,
                 "path": task_id_path
             })
 
-        final_data = run_test_suite(my_tasks)
-        save_summary(final_data, task_id_path)
+        if not my_tasks:
+            print("No test cases found in the scenario.")
+            final_data = {
+                "INVALID_SCENARIO": [
+                    "FAIL",
+                    "N/A"
+                ],
+            }
+            exit_code = 1
+        else:
+            final_data = run_test_suite(my_tasks)
+            print("\n--- Final Aggregated Results ---", final_data)
+        save_summary(final_data)
 
         # Generate report INSIDE the task folder
         generate_html_report(os.path.basename(scenario_path), final_data, task_id_path)
+    sys.exit(exit_code)

View File

@@ -37,14 +37,18 @@ EOF
 # 1. CD into the repo path
 # 2. Execute command and capture output
-# 3. PIPESTATUS[1] captures the exit code of the CMD, not the 'cd' or 'tee'
-cd "$REPO_PATH" && eval "$CMD" 2>&1 | tee -a >(sed 's/$/<br>/' >> "$LOG_FILE")
+# 3. PIPESTATUS[0] captures the exit code of the eval "$CMD"
+export PYTHONUNBUFFERED=1
+echo "--- Execution Start ---" | tee -a >(sed 's/$/<br>/' >> "$LOG_FILE")
+cd "$REPO_PATH" && stdbuf -oL -eL /bin/bash -c "$CMD" 2>&1 | tee -a >(sed 's/$/<br>/' >> "$LOG_FILE")
 EXIT_CODE=${PIPESTATUS[0]}
+echo "--- Execution End (Exit Code: $EXIT_CODE) ---" | tee -a >(sed 's/$/<br>/' >> "$LOG_FILE")
 
 # Close HTML tags
 echo "</div></body></html>" >> "$LOG_FILE"
 
 # Determine PASS/FAIL
+# We consider it a FAIL if the exit code is non-zero
 if [ $EXIT_CODE -eq 0 ]; then
     RESULT="PASS"
 else

View File

@@ -16,7 +16,17 @@ echo "🚀 Starting TestArena Deployment..."
 # 1. Install System Dependencies
 echo "📦 Installing system dependencies..."
 apt-get update
-apt-get install -y nginx python3-pip python3-venv
+apt-get install -y nginx python3-pip python3-venv sqlite3 perl
+
+# 1.1 Database Migration (Add source column if missing)
+echo "🗄️ Checking database schema..."
+DB_PATH="/home/asf/testarena/testarena.db"
+if [ -f "$DB_PATH" ]; then
+    if ! sqlite3 "$DB_PATH" ".schema queues" | grep -q "source"; then
+        echo "  Adding 'source' column to 'queues' table..."
+        sqlite3 "$DB_PATH" "ALTER TABLE queues ADD COLUMN source TEXT;"
+    fi
+fi
 
 # 2. Set up Python Virtual Environment
 echo "🐍 Setting up Python environment..."
@@ -35,11 +45,20 @@ else
     echo "⚠️ Nginx configuration not found, skipping..."
 fi
 
-# 4. Create Data Directory
-echo "📁 Creating data directory..."
+# 4. Create Data and TPF Directories
+echo "📁 Creating directories..."
 mkdir -p /home/asf/testarena
+mkdir -p /home/asf/testarena_backend/TPF
 chown -R asf:asf /home/asf/testarena
+chown -R asf:asf /home/asf/testarena_backend/TPF
 chmod -R 755 /home/asf/testarena
+chmod -R 755 /home/asf/testarena_backend/TPF
+
+# Copy scripts to TPF
+# Note: scenario_execution.py, etc. are already in TPF/ in the repo
+cp gitea_repo_controller.sh /home/asf/testarena_backend/TPF/
+chmod +x /home/asf/testarena_backend/TPF/*.sh
+chmod +x /home/asf/testarena_backend/TPF/*.py
 
 # 5. Set up Systemd Services
 echo "⚙️ Setting up Systemd services..."

View File

@@ -14,19 +14,19 @@ sudo chmod +x deploy.sh
 sudo ./deploy.sh
 ```
 
-### 3. Start the Application Services
-You should run these in the background or using a process manager like `pm2` or `systemd`.
+### 3. Manage Application Services
+The application and worker are managed by `systemd`. You can control them using the following commands:
 
-**Start the API Server:**
+**Check Status:**
 ```bash
-source venv/bin/activate
-uvicorn testarena_app.main:app --host 0.0.0.0 --port 8000
+sudo systemctl status testarena-app
+sudo systemctl status testarena-worker
 ```
 
-**Start the Background Worker:**
+**Restart Services:**
 ```bash
-source venv/bin/activate
-python3 -m testarena_app.worker
+sudo systemctl restart testarena-app
+sudo systemctl restart testarena-worker
 ```
 
 ---
@@ -44,16 +44,16 @@ Navigate to:
 You should see an automatic directory listing of /home/asf/testarena/.
 
 ### 3. Test the Queue API
-Run the following `curl` command to queue a test task:
+Run the following `curl` command to queue a test task with branch information:
 
 ```bash
 curl -X POST http://asf-server.duckdns.org:8080/api/queue \
   -H "Content-Type: application/json" \
   -d '{
-        "test_queue_001": [
+        "source": "add_esp_idf_io_wrappers",
+        "345": [
           "staging",
           {
-            "task_1": "/home/asf/scenarios/test1.py",
-            "task_2": "/home/asf/scenarios/test2.py"
+            "5555": "application_layer/business_stack/actuator_manager/test/actuator_manager_init_test.test_scenario.xml"
           }
         ]
       }'

doc/README.md (new file, 22 lines)

@@ -0,0 +1,22 @@
# TestArena Backend Documentation
Welcome to the official documentation for the TestArena Backend. This system is designed to automate the build and test execution process for ESP32 projects using ESP-IDF and QEMU.
## Table of Contents
1. **[Architecture & Design](architecture.md)**: Overview of the system components, technology stack, and design principles.
2. **[Usage Guide](usage.md)**: Instructions on how to deploy, configure, and use the system.
3. **[API Reference](api_reference.md)**: Detailed documentation of the available REST API endpoints.
4. **[Workflows & Flowcharts](flow_diagrams.md)**: Visual representations of the system's key processes.
## Project Overview
TestArena is a robust platform for managing and executing automated test scenarios. It provides a web-based dashboard for monitoring test queues, real-time logging of execution steps, and detailed HTML reports of test results.
### Key Features
* **Automated Build & Test**: Automatically clones repositories, builds firmware, and executes tests in a QEMU environment.
* **Real-time Monitoring**: Live dashboard with search, sorting, and queue management (abort/delete).
* **Comprehensive Logging**: Timestamped and leveled logs for every step of the process.
* **Scalable Architecture**: Decoupled API and Worker services for better performance and reliability.
* **Easy Deployment**: Automated deployment script for quick setup on Ubuntu servers.

doc/api_reference.md (new file, 111 lines)

@@ -0,0 +1,111 @@
# API Reference
The TestArena Backend provides a RESTful API for managing test queues and monitoring system status.
## Base URL
`http://<server-ip>:8080/api`
---
## Endpoints
### 1. Submit a New Queue
`POST /queue`
Submits a new set of tasks to the execution queue.
**Request Body:**
```json
{
    "source": "string",
    "<queue_id>": [
        "environment_name",
        {
            "<task_id>": "path/to/scenario.xml"
        }
    ]
}
```
**Response:**
```json
{
    "status": "Queue OK",
    "queue_id": "string"
}
```
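For a quick integration test, the same call can be made from Python. This is a minimal client sketch, assuming the `requests` package is installed and the server is reachable; the host, queue ID, branch, and scenario path below are placeholders rather than values from this repository:
```python
# Minimal sketch: submit a queue to the TestArena API (placeholder values).
import requests

BASE_URL = "http://<server-ip>:8080/api"  # replace with your server address

payload = {
    "source": "feature/my-branch",            # branch the worker should check out
    "QUEUE_001": [                            # queue ID
        "staging",                            # environment name
        {"TASK_001": "path/to/scenario.xml"}  # task ID -> scenario path
    ]
}

resp = requests.post(f"{BASE_URL}/queue", json=payload, timeout=10)
resp.raise_for_status()
print(resp.json())  # expected: {"status": "Queue OK", "queue_id": "QUEUE_001"}
```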
---
### 2. List All Queues
`GET /queues`
Returns a list of all queues in the system, ordered by creation date (newest first).
**Response:**
```json
[
    {
        "id": "string",
        "status": "Waiting|Running|Finished|Aborted",
        "created_at": "ISO8601 Timestamp",
        "environment": "string",
        "source": "string"
    }
]
```
---
### 3. Get Status
`GET /status/{id}`
Gets the status of a specific queue or task.
**Response:**
```json
{
    "id": "string",
    "type": "queue|task",
    "status": "string"
}
```
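Clients typically poll this endpoint after submitting a queue. A hedged sketch under the same assumptions as the submission example above (Python `requests`, placeholder host and queue ID):
```python
# Sketch: poll GET /status/{id} until the queue reaches a terminal state.
import time

import requests

BASE_URL = "http://<server-ip>:8080/api"  # placeholder

def wait_for_queue(queue_id: str, poll_seconds: int = 10) -> str:
    """Return the final status of a queue (e.g. Finished or Aborted)."""
    while True:
        resp = requests.get(f"{BASE_URL}/status/{queue_id}", timeout=10)
        resp.raise_for_status()
        status = resp.json()["status"]
        if status not in ("Waiting", "Running"):
            return status
        time.sleep(poll_seconds)

# Example: print(wait_for_queue("QUEUE_001"))
```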
---
### 4. Abort Queue or Task
`POST /abort/{id}`
Aborts a waiting or running queue or a single task.
**Response:**
```json
{
    "id": "string",
    "status": "Aborted"
}
```
---
### 5. Delete Queue
`DELETE /delete/{id}`
Permanently deletes a queue, its associated tasks, and all related files from the server.
**Response:**
```json
{
    "id": "string",
    "status": "Deleted"
}
```
---
## Error Handling
The API uses standard HTTP status codes:
* `200 OK`: Request successful.
* `404 Not Found`: The requested ID does not exist.
* `500 Internal Server Error`: An unexpected error occurred on the server.

doc/architecture.md (new file, 64 lines)

@@ -0,0 +1,64 @@
# Architecture & Design
This document describes the high-level architecture and design of the TestArena Backend.
## System Architecture
TestArena follows a decoupled architecture consisting of a RESTful API and a background worker.
```mermaid
graph TD
User((User)) -->|HTTP| API[FastAPI Web Server]
API -->|Read/Write| DB[(SQLite Database)]
API -->|Create| FS[Filesystem /testarena]
Worker[Background Worker] -->|Poll| DB
Worker -->|Execute| Scripts[Execution Scripts]
Scripts -->|Build/Test| QEMU[QEMU Emulator]
Scripts -->|Write Logs| FS
User -->|View| Dashboard[Web Dashboard]
Dashboard -->|API Calls| API
```
## Core Components
### 1. FastAPI Web Server (`main.py`)
The entry point for the system. It handles:
* Receiving test queue requests via REST API.
* Managing the SQLite database.
* Serving the web dashboard and static files.
* Providing endpoints for status monitoring, aborting, and deleting queues.
### 2. Background Worker (`worker.py`)
A dedicated process that continuously polls the database for "Waiting" queues. Its responsibilities include:
* Cloning and checking out the correct branch of the target repository.
* Orchestrating the build process using `idf.py build`.
* Running the QEMU simulation.
* Executing individual test scenarios.
* Managing real-time logging to the filesystem.
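Conceptually, the claim-and-poll step looks roughly like the sketch below. This is illustrative only: the real `worker.py` goes through SQLAlchemy sessions and then runs the full clone/build/test pipeline, while the sketch only shows the pattern against the same SQLite schema (the 5-second idle wait matches the workflow diagram).
```python
# Simplified, illustrative polling loop; not the actual worker implementation.
import sqlite3
import time

DB_PATH = "/home/asf/testarena/testarena.db"  # path used by the deployed system

def claim_next_queue(db_path: str = DB_PATH):
    """Claim the oldest 'Waiting' queue, mark it 'Running', and return its id."""
    with sqlite3.connect(db_path) as conn:
        row = conn.execute(
            "SELECT id FROM queues WHERE status = 'Waiting' "
            "ORDER BY created_at LIMIT 1"
        ).fetchone()
        if row is None:
            return None
        conn.execute("UPDATE queues SET status = 'Running' WHERE id = ?", row)
        return row[0]

if __name__ == "__main__":
    while True:
        queue_id = claim_next_queue()
        if queue_id:
            print(f"Processing queue: {queue_id}")
            # ... clone repo, build firmware, run tasks, update statuses ...
        else:
            time.sleep(5)  # idle wait between polls
```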
### 3. Execution Scripts (`TPF/`)
A set of specialized scripts for different stages of the workflow:
* `gitea_repo_controller.sh`: Handles Git operations (clone, checkout, pull).
* `scenario_execution.py`: Parses XML scenarios and runs test suites.
* `test_execution.sh`: Executes individual test commands and generates HTML logs.
* `scenario_exe_parser.py`: Helper script for parsing XML scenario files.
## Technology Stack
| Layer | Technology |
| :--- | :--- |
| **Backend Framework** | FastAPI (Python 3.12+) |
| **Database** | SQLite with SQLAlchemy ORM |
| **Frontend** | Vanilla HTML5, CSS3 (Modern Glassmorphism UI), JavaScript |
| **Process Management** | Systemd (testarena-app, testarena-worker) |
| **Web Server / Proxy** | Nginx |
| **Build System** | ESP-IDF (Espressif IoT Development Framework) |
| **Emulator** | QEMU (XTENSA) |
| **Scripting** | Bash, Python |
## Design Principles
* **Decoupling**: The API and Worker are separate processes, allowing the UI to remain responsive even during heavy test execution.
* **Real-time Visibility**: All subprocess output is streamed in real-time to both the console and log files.
* **Robustness**: Implemented global timeouts, automatic cleanup of orphaned processes, and detailed error handling.
* **Automation**: The entire deployment and execution flow is fully automated to minimize manual intervention.

doc/flow_diagrams.md (new file, 75 lines)

@@ -0,0 +1,75 @@
# Workflows & Flowcharts
This document provides visual representations of the key processes within the TestArena system.
## 1. Test Queue Submission Flow
This sequence diagram shows the process from when a user submits a queue until it is ready for the worker.
```mermaid
sequenceDiagram
participant User
participant API as FastAPI (main.py)
participant DB as SQLite
participant FS as Filesystem
User->>API: POST /api/queue (Payload)
API->>API: Extract Queue ID, Source, Tasks
API->>FS: Create /testarena/<queue_id> folder
API->>FS: Create queue_status.json
API->>DB: Insert Queue & Task records
API-->>User: 200 OK (Queue OK)
```
## 2. Worker Execution Workflow
This diagram illustrates the lifecycle of a test queue as processed by the background worker.
```mermaid
flowchart TD
Start([Start Worker]) --> Poll{Poll DB for 'Waiting'}
Poll -- No --> Wait[Wait 5s] --> Poll
Poll -- Yes --> Running[Set Status to 'Running']
Running --> Clone[Clone/Checkout Repo]
Clone --> Build[Build Firmware: idf.py build]
Build --> QEMU[Start QEMU: idf.py qemu]
QEMU --> Loop[Loop through Tasks]
subgraph Task Execution
Loop --> RunScript[Run scenario_execution.py]
RunScript --> Stream[Stream Output to Log]
Stream --> Parse[Parse Results]
Parse --> UpdateDB[Update Task Status & Result]
end
UpdateDB --> Next{More Tasks?}
Next -- Yes --> Loop
Next -- No --> Finish[Set Queue to 'Finished']
Finish --> Wait
```
## 3. Real-time Logging Architecture
How logs are captured and displayed to the user.
```mermaid
graph LR
Sub[Subprocess] -->|Stdout/Stderr| Pipe[Pipe]
Pipe -->|Read Line| Worker[worker.py]
Worker -->|Write| File[queue_log.txt]
Worker -->|Print| Console[Systemd Journal]
User -->|View| Dashboard[Web Dashboard]
Dashboard -->|Fetch| API[FastAPI]
API -->|Read| File
```
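The streaming step in this diagram amounts to reading the subprocess pipe line by line and mirroring each line to the console (systemd journal) and a log file. A simplified sketch of that pattern; the actual `run_command_with_logging` helper in `worker.py` additionally adds timestamps, log levels, a global timeout, and a stop-string check:
```python
# Sketch: tee a subprocess's combined stdout/stderr to console and a log file.
import subprocess

def stream_command(cmd: str, log_path: str) -> int:
    """Run `cmd` in a shell and stream its output; return the exit code."""
    with open(log_path, "a") as log, subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        bufsize=1,
    ) as process:
        for line in process.stdout:
            print(line, end="")  # visible in the systemd journal
            log.write(line)
            log.flush()
    return process.returncode

# Example (placeholder command and path):
# stream_command("echo hello", "/tmp/queue_log.txt")
```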
## 4. Delete Queue Workflow
The process of cleaning up system data.
```mermaid
flowchart LR
Req[DELETE /api/delete/{id}] --> DB[Delete DB Records]
DB --> FS[Remove /testarena/{id} Directory]
FS --> Res[Return Success]
```

doc/usage.md (new file, 80 lines)

@@ -0,0 +1,80 @@
# Usage Guide
This guide provides instructions on how to deploy, configure, and use the TestArena system.
## Deployment
The system is designed to be deployed on an Ubuntu server. An automated deployment script is provided.
### Prerequisites
* Ubuntu 22.04 or later.
* ESP-IDF installed at `/home/asf/esp/esp-idf`.
* QEMU (XTENSA) installed and available in the system path.
### Installation Steps
1. Clone the repository to `/home/asf/testarena_backend`.
2. Navigate to the directory: `cd /home/asf/testarena_backend`.
3. Run the deployment script with sudo:
```bash
sudo ./deploy.sh
```
This script will:
* Install system dependencies (`nginx`, `sqlite3`, `perl`, etc.).
* Set up a Python virtual environment and install requirements.
* Configure Nginx as a reverse proxy.
* Set up and enable systemd services for the API and Worker.
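The deploy script also performs a small schema migration: if an existing database predates this change, it adds the `source` (branch) column to the `queues` table. For reference, a rough Python equivalent of that check; the actual script uses the `sqlite3` CLI, and the database path matches the deployed layout:
```python
# Illustrative equivalent of the schema check in deploy.sh (which shells out
# to the sqlite3 CLI); adds queues.source if it is missing.
import sqlite3

DB_PATH = "/home/asf/testarena/testarena.db"

def ensure_source_column(db_path: str = DB_PATH) -> None:
    """Add the 'source' column to the 'queues' table if it is missing."""
    with sqlite3.connect(db_path) as conn:
        columns = [row[1] for row in conn.execute("PRAGMA table_info(queues)")]
        if "source" not in columns:
            conn.execute("ALTER TABLE queues ADD COLUMN source TEXT")

if __name__ == "__main__":
    ensure_source_column()
```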
## Managing Services
Use standard `systemctl` commands to manage the TestArena services:
```bash
# Restart services
sudo systemctl restart testarena-app testarena-worker
# Check status
sudo systemctl status testarena-app testarena-worker
# View logs
sudo journalctl -u testarena-worker -f
```
## Using the Dashboard
Access the dashboard at `http://<server-ip>:8080/`.
### Features:
* **Queue Monitor**: View all test queues, their status, and environment.
* **Search**: Filter queues by Queue ID using the search box.
* **Sorting**: Click on table headers (Queue ID, Environment, Status) to sort the data.
* **Actions**:
* **Abort**: Stop a running or waiting queue.
* **Delete**: Permanently remove a queue's data from the database and filesystem.
* **Live Logs**: View real-time system logs in the sidebar.
## Submitting a Test Queue
You can submit a new test queue by sending a POST request to `/api/queue`.
### Example Payload:
```json
{
    "source": "feature/new-sensor",
    "QUEUE_12345": [
        "Production_Env",
        {
            "TASK_001": "path/to/scenario_1.xml",
            "TASK_002": "path/to/scenario_2.xml"
        }
    ]
}
```
## Viewing Results
Test results are stored in `/home/asf/testarena/<queue_id>/`.
* `queue_log.txt`: The full execution log for the entire queue.
* `<task_id>/execution_report.html`: A detailed HTML report for a specific task.
* `<task_id>/<case_id>-logging.html`: Individual logs for each test case.
You can also browse results via the web interface at `http://<server-ip>:8080/results/`.

View File

@@ -72,11 +72,8 @@ checkout_branch() {
     git pull "${AUTH_URL}" main
 
     echo "🌿 Checking out target branch: ${BRANCH_NAME}"
-    if git show-ref --verify --quiet "refs/heads/${BRANCH_NAME}"; then
     git checkout "${BRANCH_NAME}"
-    else
-        git checkout -b "${BRANCH_NAME}" "origin/${BRANCH_NAME}"
-    fi
 
     echo "⬆️ Rebasing '${BRANCH_NAME}' onto latest main..."
     git rebase main

restart_services.sh (new file, 24 lines)

@@ -0,0 +1,24 @@
#!/bin/bash
# TestArena Service Restart Script
# This script restarts all components of the TestArena system.
# Usage: sudo ./restart_services.sh
if [ "$EUID" -ne 0 ]; then
    echo "❌ Please run as root (use sudo ./restart_services.sh)"
    exit 1
fi
echo "🔄 Restarting TestArena Services..."
echo "🌐 Restarting Nginx..."
systemctl restart nginx
echo "📱 Restarting TestArena App..."
systemctl restart testarena-app
echo "⚙️ Restarting TestArena Worker..."
systemctl restart testarena-worker
echo "✅ All services restarted!"
systemctl status testarena-app testarena-worker nginx --no-pager

View File

@@ -6,10 +6,14 @@ After=network.target
 User=asf
 Group=asf
 WorkingDirectory=/home/asf/testarena_backend
-Environment="PATH=/home/asf/testarena_backend/venv/bin"
+Environment="PATH=/home/asf/testarena_backend/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
+Environment="IDF_PATH=/home/asf/esp/esp-idf"
+Environment="XDG_RUNTIME_DIR=/tmp"
 Environment="DATABASE_URL=sqlite:////home/asf/testarena/testarena.db"
 ExecStart=/home/asf/testarena_backend/venv/bin/uvicorn testarena_app.main:app --host 0.0.0.0 --port 8000
 Restart=always
+RestartSec=10
+StartLimitIntervalSec=0
 
 [Install]
 WantedBy=multi-user.target

View File

@@ -6,10 +6,14 @@ After=network.target testarena-app.service
 User=asf
 Group=asf
 WorkingDirectory=/home/asf/testarena_backend
-Environment="PATH=/home/asf/testarena_backend/venv/bin"
+Environment="PATH=/home/asf/testarena_backend/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
+Environment="IDF_PATH=/home/asf/esp/esp-idf"
+Environment="XDG_RUNTIME_DIR=/tmp"
 Environment="DATABASE_URL=sqlite:////home/asf/testarena/testarena.db"
 ExecStart=/home/asf/testarena_backend/venv/bin/python3 -m testarena_app.worker
 Restart=always
+RestartSec=10
+StartLimitIntervalSec=0
 
 [Install]
 WantedBy=multi-user.target

View File

@@ -15,6 +15,10 @@ static_dir = os.path.join(os.path.dirname(__file__), "static")
 os.makedirs(static_dir, exist_ok=True)
 app.mount("/static", StaticFiles(directory=static_dir), name="static")
 
+@app.get("/favicon.ico", include_in_schema=False)
+async def favicon():
+    return FileResponse(os.path.join(static_dir, "favicon.png"))
+
 # Base directory for data as requested
 BASE_DATA_DIR = "/home/asf/testarena"
 # For local development on Windows, we might need to adjust this,
@@ -31,10 +35,13 @@ models.Base.metadata.create_all(bind=database.engine)
 @app.post("/api/queue")
 async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
     """
-    Input json contain {<queue_ID> :[environment, "<TASK_ID>" : "<path to scenario>],}
+    Input json contain {"source": "<branch_name>", <queue_ID> :[environment, {"<TASK_ID>" : "<path to scenario>"},]}
     """
     try:
-        queue_id = list(payload.keys())[0]
+        source = payload.get("source", "main")
+        # Find the queue_id key (it's the one that isn't "source")
+        queue_id = next(k for k in payload.keys() if k != "source")
         data = payload[queue_id]
         environment = data[0]
         tasks_data = data[1]  # This is a dict {"TASK_ID": "path"}
@@ -47,12 +54,13 @@ async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
         status_file = os.path.join(queue_dir, "queue_status.json")
         queue_status = {
             "queue_id": queue_id,
+            "source": source,
             "status": "Waiting",
             "tasks": {}
         }
 
         # 3. Save to database and prepare status file
-        new_queue = models.Queue(id=queue_id, environment=environment, status="Waiting")
+        new_queue = models.Queue(id=queue_id, environment=environment, source=source, status="Waiting")
         db.add(new_queue)
 
         for task_id, scenario_path in tasks_data.items():
@@ -134,6 +142,46 @@ async def list_queues(db: Session = Depends(database.get_db)):
     queues = db.query(models.Queue).order_by(models.Queue.created_at.desc()).all()
     return queues
 
+@app.delete("/api/delete/{id}")
+async def delete_queue(id: str, db: Session = Depends(database.get_db)):
+    # 1. Delete from database
+    queue = db.query(models.Queue).filter(models.Queue.id == id).first()
+    if queue:
+        # Delete associated tasks first
+        db.query(models.Task).filter(models.Task.queue_id == id).delete()
+        db.delete(queue)
+        db.commit()
+
+        # 2. Delete folder
+        queue_dir = os.path.join(BASE_DATA_DIR, id)
+        if os.path.exists(queue_dir):
+            import shutil
+            shutil.rmtree(queue_dir)
+
+        return {"id": id, "status": "Deleted"}
+    raise HTTPException(status_code=404, detail="ID not found")
+
+@app.get("/api/system/status")
+async def system_status():
+    """Check the status of system services"""
+    services = ["testarena-app", "testarena-worker", "nginx"]
+    status = {}
+    for service in services:
+        try:
+            # Use systemctl is-active for a quick check
+            res = os.system(f"systemctl is-active --quiet {service}")
+            status[service] = "online" if res == 0 else "offline"
+        except:
+            status[service] = "unknown"
+    return status
+
+@app.get("/api/queue/{id}/tasks")
+async def get_queue_tasks(id: str, db: Session = Depends(database.get_db)):
+    """Get all tasks for a specific queue"""
+    tasks = db.query(models.Task).filter(models.Task.queue_id == id).all()
+    return tasks
+
 @app.get("/")
 async def root():
     return FileResponse(os.path.join(static_dir, "index.html"))

View File

@@ -12,6 +12,7 @@ class Queue(Base):
     status = Column(String, default="Waiting")  # Finished, Waiting, Running, Aborted
     created_at = Column(DateTime, default=datetime.datetime.utcnow)
     environment = Column(String)
+    source = Column(String)  # Branch name
 
     tasks = relationship("Task", back_populates="queue", cascade="all, delete-orphan")

Binary file not shown (new image, 295 KiB).

View File

@@ -5,6 +5,7 @@
     <meta charset="UTF-8">
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <title>TestArena | Modern Dashboard</title>
+    <link rel="icon" type="image/png" href="/static/favicon.png">
     <link href="https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;600;700&display=swap" rel="stylesheet">
     <style>
         :root {
@@ -209,6 +210,12 @@
             color: #f87171;
         }
 
+        .status-timed-out {
+            background: rgba(245, 158, 11, 0.1);
+            color: #fbbf24;
+            border: 1px solid rgba(245, 158, 11, 0.3);
+        }
+
         .btn-abort {
             background: rgba(239, 68, 68, 0.1);
             color: #f87171;
@@ -277,10 +284,8 @@
     <div class="container">
         <header>
             <div class="logo">
-                <svg width="32" height="32" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5"
-                    stroke-linecap="round" stroke-linejoin="round">
-                    <path d="M12 2L2 7l10 5 10-5-10-5zM2 17l10 5 10-5M2 12l10 5 10-5" />
-                </svg>
+                <img src="/static/favicon.png" alt="TestArena Logo"
+                    style="width: 40px; height: 40px; border-radius: 8px;">
                 TestArena
             </div>
             <nav class="nav-links">
@@ -291,25 +296,41 @@
                 <div class="dot"></div>
                 <span>Connecting...</span>
             </div>
+            <div id="service-status" style="display: flex; gap: 1rem;">
+                <div class="status-badge" title="App Service">
+                    <div id="app-dot" class="dot"></div>
+                    <span>App</span>
+                </div>
+                <div class="status-badge" title="Worker Service">
+                    <div id="worker-dot" class="dot"></div>
+                    <span>Worker</span>
+                </div>
+            </div>
         </header>
 
         <div class="grid">
             <div class="card">
-                <h2>
-                    <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
-                        stroke-linecap="round" stroke-linejoin="round">
-                        <rect x="3" y="3" width="18" height="18" rx="2" ry="2" />
-                        <line x1="3" y1="9" x2="21" y2="9" />
-                        <line x1="9" y1="21" x2="9" y2="9" />
-                    </svg>
-                    Queue Monitor
-                </h2>
+                <div style="display: flex; justify-content: space-between; align-items: center; margin-bottom: 1.5rem;">
+                    <h2>
+                        <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor"
+                            stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+                            <rect x="3" y="3" width="18" height="18" rx="2" ry="2" />
+                            <line x1="3" y1="9" x2="21" y2="9" />
+                            <line x1="9" y1="21" x2="9" y2="9" />
+                        </svg>
+                        Queue Monitor
+                    </h2>
+                    <div style="position: relative; width: 300px;">
+                        <input type="text" id="search-input" placeholder="Search Queue ID..."
+                            style="width: 100%; padding: 0.6rem 1rem; border-radius: 0.75rem; background: var(--glass); border: 1px solid var(--glass-border); color: var(--text); font-family: inherit;">
+                    </div>
+                </div>
+
                 <table id="queue-table">
                     <thead>
                         <tr>
-                            <th>Queue ID</th>
-                            <th>Environment</th>
-                            <th>Status</th>
+                            <th onclick="sortTable(0)" style="cursor: pointer;">Queue ID</th>
+                            <th onclick="sortTable(1)" style="cursor: pointer;">Environment</th>
+                            <th onclick="sortTable(2)" style="cursor: pointer;">Status</th>
                             <th>Actions</th>
                         </tr>
                     </thead>
@@ -317,6 +338,37 @@
                         <!-- Dynamic content -->
                     </tbody>
                 </table>
+
+                <div id="tasks-section"
+                    style="margin-top: 3rem; display: none; border-top: 1px solid var(--glass-border); padding-top: 2rem;">
+                    <div
+                        style="display: flex; justify-content: space-between; align-items: center; margin-bottom: 1.5rem;">
+                        <h2>
+                            <svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor"
+                                stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+                                <path d="M9 11l3 3L22 4" />
+                                <path d="M21 12v7a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11" />
+                            </svg>
+                            Tasks for <span id="selected-queue-id"></span>
+                        </h2>
+                        <button class="btn-abort"
+                            style="background: var(--glass); color: var(--text); border-color: var(--glass-border);"
+                            onclick="hideTasks()">Close</button>
+                    </div>
+                    <table id="tasks-table">
+                        <thead>
+                            <tr>
+                                <th>Task ID</th>
+                                <th>Scenario</th>
+                                <th>Status</th>
+                                <th>Result</th>
+                            </tr>
+                        </thead>
+                        <tbody>
+                            <!-- Dynamic content -->
+                        </tbody>
+                    </table>
+                </div>
             </div>
 
             <div class="card">
@@ -338,30 +390,20 @@
     </div>
 
     <script>
+        let currentQueues = [];
+        let sortDirection = [true, true, true];
+
         async function fetchStatus() {
             try {
                 const response = await fetch('/api/queues');
-                const queues = await response.json();
-
-                const tbody = document.querySelector('#queue-table tbody');
-                tbody.innerHTML = '';
-
-                queues.forEach(q => {
-                    const tr = document.createElement('tr');
-                    tr.innerHTML = `
-                        <td style="font-weight: 600;">${q.id}</td>
-                        <td><span style="opacity: 0.8;">${q.environment}</span></td>
-                        <td><span class="status-pill status-${q.status.toLowerCase()}">${q.status}</span></td>
-                        <td>
-                            <button class="btn-abort" onclick="abortQueue('${q.id}')">Abort</button>
-                        </td>
-                    `;
-                    tbody.appendChild(tr);
-                });
+                currentQueues = await response.json();
+                renderTable();
 
                 const badge = document.getElementById('connection-status');
                 badge.querySelector('.dot').classList.add('online');
                 badge.querySelector('span').textContent = 'System Online';
+                fetchServiceStatus();
             } catch (e) {
                 const badge = document.getElementById('connection-status');
                 badge.querySelector('.dot').classList.remove('online');
@@ -369,6 +411,62 @@
             }
         }
 
+        async function fetchServiceStatus() {
+            try {
+                const response = await fetch('/api/system/status');
+                const status = await response.json();
+
+                const appDot = document.getElementById('app-dot');
+                const workerDot = document.getElementById('worker-dot');
+
+                if (status['testarena-app'] === 'online') appDot.classList.add('online');
+                else appDot.classList.remove('online');
+
+                if (status['testarena-worker'] === 'online') workerDot.classList.add('online');
+                else workerDot.classList.remove('online');
+            } catch (e) { }
+        }
+
+        function renderTable() {
+            const searchTerm = document.getElementById('search-input').value.toLowerCase();
+            const tbody = document.querySelector('#queue-table tbody');
+            tbody.innerHTML = '';
+
+            const filteredQueues = currentQueues.filter(q => q.id.toLowerCase().includes(searchTerm));
+
+            filteredQueues.forEach(q => {
+                const tr = document.createElement('tr');
+                tr.innerHTML = `
+                    <td style="font-weight: 600;">${q.id}</td>
+                    <td><span style="opacity: 0.8;">${q.environment}</span></td>
+                    <td><span class="status-pill status-${q.status.toLowerCase().replace(' ', '-')}">${q.status}</span></td>
+                    <td style="display: flex; gap: 0.5rem;">
+                        <button class="btn-abort" style="background: rgba(99, 102, 241, 0.1); color: #818cf8; border-color: rgba(99, 102, 241, 0.2);" onclick="viewTasks('${q.id}')">Tasks</button>
+                        <button class="btn-abort" onclick="abortQueue('${q.id}')">Abort</button>
+                        <button class="btn-abort" style="background: rgba(239, 68, 68, 0.2); border-color: var(--danger);" onclick="deleteQueue('${q.id}')">Delete</button>
+                    </td>
+                `;
+                tbody.appendChild(tr);
+            });
+        }
+
+        function sortTable(n) {
+            sortDirection[n] = !sortDirection[n];
+            const keys = ['id', 'environment', 'status'];
+            const key = keys[n];
+
+            currentQueues.sort((a, b) => {
+                let valA = a[key].toLowerCase();
+                let valB = b[key].toLowerCase();
+                if (valA < valB) return sortDirection[n] ? -1 : 1;
+                if (valA > valB) return sortDirection[n] ? 1 : -1;
+                return 0;
+            });
+
+            renderTable();
+        }
+
+        document.getElementById('search-input').addEventListener('input', renderTable);
+
         async function abortQueue(id) {
             if (confirm(`Are you sure you want to abort queue ${id}?`)) {
                 try {
@@ -381,6 +479,49 @@
             }
         }
 
+        async function deleteQueue(id) {
+            if (confirm(`Are you sure you want to DELETE queue ${id}? This will remove all files and database records.`)) {
+                try {
+                    await fetch(`/api/delete/${id}`, { method: 'DELETE' });
+                    addLog(`Deleted queue: ${id}`, 'danger');
+                    fetchStatus();
+                } catch (e) {
+                    addLog(`Failed to delete queue: ${id}`, 'danger');
+                }
+            }
+        }
+
+        async function viewTasks(queueId) {
+            document.getElementById('tasks-section').style.display = 'block';
+            document.getElementById('selected-queue-id').textContent = queueId;
+            document.getElementById('tasks-section').scrollIntoView({ behavior: 'smooth' });
+
+            try {
+                const response = await fetch(`/api/queue/${queueId}/tasks`);
+                const tasks = await response.json();
+
+                const tbody = document.querySelector('#tasks-table tbody');
+                tbody.innerHTML = '';
+
+                tasks.forEach(t => {
+                    const tr = document.createElement('tr');
+                    const resultStr = t.result ? JSON.stringify(t.result).substring(0, 50) + '...' : '-';
+                    tr.innerHTML = `
+                        <td>${t.id}</td>
+                        <td title="${t.scenario_path}">${t.scenario_path.split('/').pop()}</td>
+                        <td><span class="status-pill status-${t.status.toLowerCase().replace(' ', '-')}">${t.status}</span></td>
+                        <td><small>${resultStr}</small></td>
+                    `;
+                    tbody.appendChild(tr);
+                });
+            } catch (e) {
+                addLog(`Failed to fetch tasks for ${queueId}`, 'danger');
+            }
+        }
+
+        function hideTasks() {
+            document.getElementById('tasks-section').style.display = 'none';
+        }
+
         function addLog(msg, type = 'info') {
             const logs = document.getElementById('logs');
             const entry = document.createElement('div');

View File

@@ -29,6 +29,90 @@ def update_json_status(queue_id, task_id, status, result=None):
         with open(status_file, 'w') as f:
             json.dump(data, f, indent=4)
 
+import datetime
+import time
+import os
+
+os.environ["XDG_RUNTIME_DIR"] = "/tmp"
+
+def run_command_with_logging(cmd, log_file, cwd=None, env=None, timeout=1800, stop_string=None):
+    """Runs a command, logs output, and optionally stops when a string is found."""
+    if env is None:
+        env = os.environ.copy()
+    if cwd is None:
+        cwd = os.getcwd()
+
+    start_time = time.time()
+    iso_start = datetime.datetime.now().isoformat()
+
+    with open(log_file, "a") as f:
+        msg = f"[{iso_start}] [INFO] Executing command: {cmd} in {cwd}\n"
+        print(msg, end="")
+        f.write(msg)
+        f.flush()
+
+        try:
+            process = subprocess.Popen(
+                cmd,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                shell=True,
+                text=True,
+                cwd=cwd,
+                env=env,
+                bufsize=1,
+                universal_newlines=True
+            )
+
+            # Read output line by line
+            while True:
+                # Check for global timeout
+                if time.time() - start_time > timeout:
+                    process.kill()
+                    iso_now = datetime.datetime.now().isoformat()
+                    err_msg = f"[{iso_now}] [ERROR] Command timed out after {timeout} seconds\n"
+                    print(err_msg, end="")
+                    f.write(err_msg)
+                    return 124  # Timeout exit code
+
+                line = process.stdout.readline()
+                if not line and process.poll() is not None:
+                    break
+
+                if line:
+                    iso_now = datetime.datetime.now().isoformat()
+                    # Determine level (simple heuristic)
+                    level = "INFO"
+                    if any(word in line.lower() for word in ["error", "fail", "fatal", "critical"]):
+                        level = "ERROR"
+
+                    log_line = f"[{iso_now}] [{level}] {line}"
+                    print(log_line, end="")
+                    f.write(log_line)
+                    f.flush()
+
+                    # Check for stop string
+                    if stop_string and stop_string in line:
+                        iso_now = datetime.datetime.now().isoformat()
+                        stop_msg = f"[{iso_now}] [INFO] Stop string '{stop_string}' detected. Terminating process.\n"
+                        print(stop_msg, end="")
+                        f.write(stop_msg)
+                        f.flush()
+                        process.terminate()
+                        try:
+                            process.wait(timeout=5)
+                        except subprocess.TimeoutExpired:
+                            process.kill()
+                        return 0
+
+            return process.returncode
+        except Exception as e:
+            iso_now = datetime.datetime.now().isoformat()
+            err_msg = f"[{iso_now}] [ERROR] Exception during execution: {str(e)}\n"
+            print(err_msg, end="")
+            f.write(err_msg)
+            return 1
+
 def run_worker():
     print("Worker started...")
     while True:
@@ -38,11 +122,32 @@ def run_worker():
             queue = db.query(models.Queue).filter(models.Queue.status == "Waiting").order_by(models.Queue.created_at).first()
 
             if queue:
-                print(f"Processing queue: {queue.id}")
+                print(f"Processing queue: {queue.id} (Branch: {queue.source})")
                 queue.status = "Running"
                 update_json_status(queue.id, None, "Running")
                 db.commit()
 
+                queue_dir = os.path.join(BASE_DATA_DIR, queue.id)
+                os.makedirs(queue_dir, exist_ok=True)
+                queue_log = os.path.join(queue_dir, "queue_log.txt")
+
+                # 0- Clone repository if not exists
+                clone_cmd = "./TPF/gitea_repo_controller.sh clone"
+                run_command_with_logging(clone_cmd, queue_log)
+
+                # 0.1- Checkout branch
+                checkout_cmd = f"./TPF/gitea_repo_controller.sh checkout {queue.source}"
+                run_command_with_logging(checkout_cmd, queue_log)
+
+                # Clean up any orphaned QEMU processes
+                run_command_with_logging("pkill -f qemu-system-xtensa || true", queue_log)
+
+                # 1-5 Build software and run QEMU
+                # We stop when we see the multicore app start message
+                build_cmd = f"/bin/bash -c 'export PYTHONUNBUFFERED=1 && source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
+                run_command_with_logging(build_cmd, queue_log, stop_string="cpu_start: Multicore app")
+
+                # 9- Loop for each task
                 tasks = db.query(models.Task).filter(models.Task.queue_id == queue.id, models.Task.status == "Waiting").all()
 
                 for task in tasks:
@@ -51,32 +156,38 @@ def run_worker():
                     if queue.status == "Aborted":
                         break
 
+                    print(f"Running task: {task.id}")
                     task.status = "Running"
                     update_json_status(queue.id, task.id, "Running")
                     db.commit()
 
                     try:
-                        # Run tpf_execution.py [queue_id, scenario_path, task_id]
-                        # Assuming tpf_execution.py is in the parent directory or accessible
-                        script_path = "tpf_execution.py"
-                        # For testing, let's assume it's in the same dir as the app or parent
-                        cmd = ["python", script_path, queue.id, task.scenario_path, task.id]
-                        result = subprocess.run(cmd, capture_output=True, text=True)
-
-                        # Parse result if it returns json
-                        try:
-                            execution_result = json.loads(result.stdout)
-                        except:
-                            execution_result = {"output": result.stdout, "error": result.stderr}
-
-                        task.status = "Finished"
-                        task.result = execution_result
-                        update_json_status(queue.id, task.id, "Finished", execution_result)
+                        # Run scenario_execution.py queue_id scenario_path task_id
+                        # It must be executed from TPF/Sensor_hub_repo with IDF sourced
+                        script_path = os.path.abspath("./TPF/scenario_execution.py")
+                        repo_dir = os.path.abspath("./TPF/Sensor_hub_repo")
+                        cmd = f"/bin/bash -c 'export PYTHONUNBUFFERED=1 && source $HOME/esp/esp-idf/export.sh && python3 {script_path} {queue.id} {task.scenario_path} {task.id}'"
+
+                        task_dir = os.path.join(queue_dir, task.id)
+                        os.makedirs(task_dir, exist_ok=True)
+
+                        ret = run_command_with_logging(cmd, queue_log, cwd=repo_dir, timeout=3600)
+
+                        if ret == 0:
+                            task.status = "Finished"
+                        elif ret == 124:
+                            task.status = "Timed Out"
+                        else:
+                            task.status = "Error"
+
+                        # Try to find the summary if it exists
+                        summary_path = os.path.join(task_dir, "final_summary.json")
+                        if os.path.exists(summary_path):
+                            with open(summary_path, 'r') as f:
+                                task.result = json.load(f)
+
+                        update_json_status(queue.id, task.id, task.status, task.result)
                     except Exception as e:
+                        print(f"Error running task {task.id}: {e}")
                         task.status = "Error"
                         update_json_status(queue.id, task.id, "Error")