update DB table

2025-12-28 00:43:37 +01:00
parent 3f903f8fcc
commit bc2ab5d799
2 changed files with 16 additions and 10 deletions

View File

@@ -16,7 +16,7 @@ echo "🚀 Starting TestArena Deployment..."
 # 1. Install System Dependencies
 echo "📦 Installing system dependencies..."
 apt-get update
-apt-get install -y nginx python3-pip python3-venv sqlite3
+apt-get install -y nginx python3-pip python3-venv sqlite3 perl
 # 1.1 Database Migration (Add source column if missing)
 echo "🗄️ Checking database schema..."

View File

@@ -29,8 +29,10 @@ def update_json_status(queue_id, task_id, status, result=None):
     with open(status_file, 'w') as f:
         json.dump(data, f, indent=4)
+import datetime
 def run_command_with_logging(cmd, log_file, cwd=None, env=None):
-    """Runs a command, logs output to file and stdout."""
+    """Runs a command, logs output to file and stdout with ISO timestamps."""
     with open(log_file, "a") as f:
         process = subprocess.Popen(
             cmd,
@@ -42,8 +44,10 @@ def run_command_with_logging(cmd, log_file, cwd=None, env=None):
             env=env
         )
         for line in process.stdout:
-            print(line, end="")
-            f.write(line)
+            timestamp = datetime.datetime.now().isoformat()
+            log_line = f"[{timestamp}] {line}"
+            print(log_line, end="")
+            f.write(log_line)
             f.flush()
         process.wait()
         return process.returncode
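
The hunks above only show the changed lines of `run_command_with_logging`; the `Popen` arguments in between (`shell`, `stdout`, `stderr`, `text`) are not visible, so the self-contained sketch below fills them with common choices (`shell=True`, stderr merged into stdout, text mode) that may differ from the real worker. It shows the complete timestamp-per-line pattern the commit introduces:

```python
import datetime
import subprocess

def run_command_with_logging(cmd, log_file, cwd=None, env=None):
    """Run a shell command, prefix every output line with an ISO timestamp,
    and write it to both stdout and the log file (sketch of the pattern above)."""
    with open(log_file, "a") as f:
        process = subprocess.Popen(
            cmd,
            shell=True,                 # assumption: cmd is passed as a shell string
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,   # assumption: fold stderr into the same log
            text=True,
            cwd=cwd,
            env=env,
        )
        for line in process.stdout:
            timestamp = datetime.datetime.now().isoformat()
            log_line = f"[{timestamp}] {line}"
            print(log_line, end="")
            f.write(log_line)
            f.flush()                   # keep the on-disk log current for tail -f
        process.wait()
        return process.returncode

# Example: run_command_with_logging("echo hello", "/tmp/queue_log.txt")
```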
@@ -66,7 +70,12 @@ def run_worker():
     os.makedirs(queue_dir, exist_ok=True)
     queue_log = os.path.join(queue_dir, "queue_log.txt")
-    # 0- Checkout branch
+    # 0- Clone repository if not exists
+    print("Cloning repository...")
+    clone_cmd = "./TPF/gitea_repo_controller.sh clone"
+    run_command_with_logging(clone_cmd, queue_log)
+    # 0.1- Checkout branch
     print(f"Checking out branch: {queue.source}")
     checkout_cmd = f"./TPF/gitea_repo_controller.sh checkout {queue.source}"
     run_command_with_logging(checkout_cmd, queue_log)
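
Whether `gitea_repo_controller.sh clone` itself skips an existing working copy is not visible in this diff, so the "Clone repository if not exists" intent could also be enforced on the Python side. A small sketch, reusing the `run_command_with_logging` helper shown above and assuming a hypothetical clone target directory:

```python
import os

REPO_DIR = "TPF/Sensor_hub_repo"   # assumed clone target; adjust to the real layout

def prepare_repo(queue, queue_log):
    """Clone only when the working copy is missing, then check out the queue's branch."""
    if not os.path.isdir(REPO_DIR):
        print("Cloning repository...")
        run_command_with_logging("./TPF/gitea_repo_controller.sh clone", queue_log)
    print(f"Checking out branch: {queue.source}")
    run_command_with_logging(
        f"./TPF/gitea_repo_controller.sh checkout {queue.source}", queue_log
    )
```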
@@ -75,7 +84,8 @@ def run_worker():
print("Building software...")
# We need to source the IDF export script and then build.
# Using a single shell command to maintain environment.
build_cmd = f"bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
# Explicitly use /bin/bash to avoid shell mismatch
build_cmd = f"/bin/bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
run_command_with_logging(build_cmd, queue_log)
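
A note on the shell mismatch this hunk fixes: when `subprocess` runs a command string with `shell=True`, it uses `/bin/sh`, which on Debian/Ubuntu is dash and does not implement the bash builtin `source`. Spelling out `/bin/bash -c '...'` in the command (as above) avoids that; another option, shown in this sketch, is to keep the plain command string and point `subprocess` at bash via `executable`. This bypasses the worker's logging helper and is only meant to illustrate the interpreter choice:

```python
import subprocess

# Keep shell=True but force bash, since the default /bin/sh (dash) rejects `source`.
ret = subprocess.run(
    "source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu",
    shell=True,
    executable="/bin/bash",
).returncode
print("build exit code:", ret)
```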
# 9- Loop for each task
@@ -94,7 +104,6 @@ def run_worker():
         try:
             # Run scenario_execution.py queue_id scenario_path task_id
-            # The user said it's in TPF/scenario_execution.py
             script_path = "./TPF/scenario_execution.py"
             # Use the same python environment
             cmd = f"python3 {script_path} {queue.id} {task.scenario_path} {task.id}"
@@ -102,10 +111,7 @@ def run_worker():
             # We want to capture this in the task log too
             task_dir = os.path.join(queue_dir, task.id)
             os.makedirs(task_dir, exist_ok=True)
-            task_log = os.path.join(task_dir, f"{task.id}-logging.html")
-            # For now, let's just log to stdout and the queue log
-            # scenario_execution.py already generates its own reports.
             ret = run_command_with_logging(cmd, queue_log)
             if ret == 0:
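
The last hunk is cut off at `if ret == 0:`; the status handling that follows is not shown. As a self-contained sketch of how a single task run could map its exit code to a status, with a stub `Task` type and placeholder status strings (the real worker presumably calls `update_json_status` with its own values):

```python
import os
import subprocess
from dataclasses import dataclass

@dataclass
class Task:                      # minimal stand-in for the worker's task objects
    id: str
    scenario_path: str

def run_task(queue_id: str, task: Task, queue_dir: str) -> str:
    """Run one scenario and translate its exit code into a status string.
    "done"/"failed" are placeholder names, not necessarily the worker's."""
    task_dir = os.path.join(queue_dir, task.id)
    os.makedirs(task_dir, exist_ok=True)
    cmd = f"python3 ./TPF/scenario_execution.py {queue_id} {task.scenario_path} {task.id}"
    ret = subprocess.run(cmd, shell=True).returncode
    return "done" if ret == 0 else "failed"
```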