update DB table
@@ -16,7 +16,7 @@ echo "🚀 Starting TestArena Deployment..."
 # 1. Install System Dependencies
 echo "📦 Installing system dependencies..."
 apt-get update
-apt-get install -y nginx python3-pip python3-venv sqlite3
+apt-get install -y nginx python3-pip python3-venv sqlite3 perl
 
 # 1.1 Database Migration (Add source column if missing)
 echo "🗄️ Checking database schema..."
@@ -29,8 +29,10 @@ def update_json_status(queue_id, task_id, status, result=None):
     with open(status_file, 'w') as f:
         json.dump(data, f, indent=4)
 
+import datetime
+
 def run_command_with_logging(cmd, log_file, cwd=None, env=None):
-    """Runs a command, logs output to file and stdout."""
+    """Runs a command, logs output to file and stdout with ISO timestamps."""
     with open(log_file, "a") as f:
         process = subprocess.Popen(
             cmd,
@@ -42,8 +44,10 @@ def run_command_with_logging(cmd, log_file, cwd=None, env=None):
             env=env
         )
         for line in process.stdout:
-            print(line, end="")
-            f.write(line)
+            timestamp = datetime.datetime.now().isoformat()
+            log_line = f"[{timestamp}] {line}"
+            print(log_line, end="")
+            f.write(log_line)
             f.flush()
         process.wait()
         return process.returncode
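
For reference, a minimal sketch of how run_command_with_logging reads once the two hunks above are applied. The diff only shows the first (cmd) and last (env=env) Popen arguments, so the shell, cwd, stdout, stderr, and text settings below are assumptions, not the file's actual values.

import datetime
import subprocess

def run_command_with_logging(cmd, log_file, cwd=None, env=None):
    """Runs a command, logs output to file and stdout with ISO timestamps."""
    with open(log_file, "a") as f:
        process = subprocess.Popen(
            cmd,
            shell=True,                # assumed: cmd is passed as a single shell string
            cwd=cwd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,  # assumed: stderr folded into the same stream
            text=True,
            env=env
        )
        for line in process.stdout:
            # Prefix every output line with an ISO-8601 timestamp before echoing and logging it.
            timestamp = datetime.datetime.now().isoformat()
            log_line = f"[{timestamp}] {line}"
            print(log_line, end="")
            f.write(log_line)
            f.flush()
        process.wait()
        return process.returncode
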
@@ -66,7 +70,12 @@ def run_worker():
     os.makedirs(queue_dir, exist_ok=True)
     queue_log = os.path.join(queue_dir, "queue_log.txt")
 
-    # 0- Checkout branch
+    # 0- Clone repository if not exists
+    print("Cloning repository...")
+    clone_cmd = "./TPF/gitea_repo_controller.sh clone"
+    run_command_with_logging(clone_cmd, queue_log)
+
+    # 0.1- Checkout branch
     print(f"Checking out branch: {queue.source}")
     checkout_cmd = f"./TPF/gitea_repo_controller.sh checkout {queue.source}"
     run_command_with_logging(checkout_cmd, queue_log)
@@ -75,7 +84,8 @@ def run_worker():
     print("Building software...")
     # We need to source the IDF export script and then build.
     # Using a single shell command to maintain environment.
-    build_cmd = f"bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
+    # Explicitly use /bin/bash to avoid shell mismatch
+    build_cmd = f"/bin/bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
     run_command_with_logging(build_cmd, queue_log)
 
     # 9- Loop for each task
@@ -94,7 +104,6 @@ def run_worker():
 
         try:
             # Run scenario_execution.py queue_id scenario_path task_id
-            # The user said it's in TPF/scenario_execution.py
             script_path = "./TPF/scenario_execution.py"
             # Use the same python environment
             cmd = f"python3 {script_path} {queue.id} {task.scenario_path} {task.id}"
@@ -102,10 +111,7 @@ def run_worker():
             # We want to capture this in the task log too
             task_dir = os.path.join(queue_dir, task.id)
             os.makedirs(task_dir, exist_ok=True)
-            task_log = os.path.join(task_dir, f"{task.id}-logging.html")
 
-            # For now, let's just log to stdout and the queue log
-            # scenario_execution.py already generates its own reports.
             ret = run_command_with_logging(cmd, queue_log)
 
             if ret == 0:
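
A minimal usage example of the updated worker flow, assuming the sketch of run_command_with_logging above. The gitea_repo_controller.sh commands, the queue_log layout, and the build command come from the diff; the queue directory and the branch name are placeholders.

import os

queue_dir = "/tmp/example_queue"  # placeholder; run_worker() builds the real path
os.makedirs(queue_dir, exist_ok=True)
queue_log = os.path.join(queue_dir, "queue_log.txt")

# 0- Clone repository if not exists
run_command_with_logging("./TPF/gitea_repo_controller.sh clone", queue_log)

# 0.1- Checkout branch ("main" is a placeholder; the worker passes queue.source)
run_command_with_logging("./TPF/gitea_repo_controller.sh checkout main", queue_log)

# Build explicitly under /bin/bash, as in the updated build_cmd
build_cmd = "/bin/bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
run_command_with_logging(build_cmd, queue_log)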