continue implementation

2025-12-27 20:01:45 +01:00
parent 79e4825b44
commit 58be2d600c
5 changed files with 85 additions and 21 deletions

View File

@@ -35,11 +35,21 @@ else
echo "⚠️ Nginx configuration not found, skipping..."
fi
# 4. Create Data Directory
echo "📁 Creating data directory..."
# 4. Create Data and TPF Directories
echo "📁 Creating directories..."
mkdir -p /home/asf/testarena
mkdir -p /home/asf/testarena_backend/TPF
chown -R asf:asf /home/asf/testarena
chown -R asf:asf /home/asf/testarena_backend/TPF
chmod -R 755 /home/asf/testarena
chmod -R 755 /home/asf/testarena_backend/TPF
# Copy scripts to TPF
cp gitea_repo_controller.sh /home/asf/testarena_backend/TPF/
cp scenario_execution.py /home/asf/testarena_backend/TPF/
cp scenario_exe_parser.py /home/asf/testarena_backend/TPF/
cp test_execution.sh /home/asf/testarena_backend/TPF/
chmod +x /home/asf/testarena_backend/TPF/*.sh
# 5. Set up Systemd Services
echo "⚙️ Setting up Systemd services..."

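The worker below invokes these copied scripts by relative path (./TPF/...), so a missed copy or chmod only surfaces at run time. A minimal preflight sketch in Python, assuming the worker's working directory is /home/asf/testarena_backend and the file names listed above; the check_tpf_scripts helper is hypothetical:

import os

# Hypothetical preflight check: confirm the scripts deployed by the setup step
# exist and that the shell scripts are executable before starting the worker.
TPF_DIR = "/home/asf/testarena_backend/TPF"
EXPECTED = [
    "gitea_repo_controller.sh",
    "scenario_execution.py",
    "scenario_exe_parser.py",
    "test_execution.sh",
]

def check_tpf_scripts():
    missing = [f for f in EXPECTED if not os.path.isfile(os.path.join(TPF_DIR, f))]
    not_exec = [f for f in EXPECTED if f.endswith(".sh")
                and not os.access(os.path.join(TPF_DIR, f), os.X_OK)]
    if missing or not_exec:
        raise RuntimeError(f"TPF deployment incomplete: missing={missing}, not executable={not_exec}")

check_tpf_scripts()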
View File

@@ -72,7 +72,10 @@ REPORT_TEMPLATE = """
def run_test_suite(tasks):
aggregated_results = {}
shell_script = "./TPF/test_execution.sh"
# Use path relative to this script
script_dir = os.path.dirname(os.path.abspath(__file__))
shell_script = os.path.join(script_dir, "test_execution.sh")
if os.name != 'nt':
subprocess.run(["chmod", "+x", shell_script])

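A note on the chmod step: shelling out to chmod works on POSIX systems, but the same effect is available in-process via os.chmod, which avoids spawning a process per call. A minimal sketch under the same script_dir layout as above:

import os
import stat

script_dir = os.path.dirname(os.path.abspath(__file__))
shell_script = os.path.join(script_dir, "test_execution.sh")

if os.name != 'nt':
    # Equivalent of `chmod +x`: add the execute bits for owner, group, and others.
    mode = os.stat(shell_script).st_mode
    os.chmod(shell_script, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)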
View File

@@ -31,10 +31,13 @@ models.Base.metadata.create_all(bind=database.engine)
@app.post("/api/queue")
async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
"""
Input json contain {<queue_ID> :[environment, "<TASK_ID>" : "<path to scenario>],}
Input json contain {"source": "<branch_name>", <queue_ID> :[environment, {"<TASK_ID>" : "<path to scenario>"},]}
"""
try:
queue_id = list(payload.keys())[0]
source = payload.get("source", "main")
# Find the queue_id key (it's the one that isn't "source")
queue_id = next(k for k in payload.keys() if k != "source")
data = payload[queue_id]
environment = data[0]
tasks_data = data[1] # This is a dict {"TASK_ID": "path"}
@@ -47,12 +50,13 @@ async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
status_file = os.path.join(queue_dir, "queue_status.json")
queue_status = {
"queue_id": queue_id,
"source": source,
"status": "Waiting",
"tasks": {}
}
# 3. Save to database and prepare status file
new_queue = models.Queue(id=queue_id, environment=environment, status="Waiting")
new_queue = models.Queue(id=queue_id, environment=environment, source=source, status="Waiting")
db.add(new_queue)
for task_id, scenario_path in tasks_data.items():

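For reference, a request body that matches this parsing would look like the sketch below; every concrete value (branch, queue ID, environment, task IDs, scenario paths) is a made-up placeholder:

# Hypothetical POST /api/queue payload: one "source" key plus exactly one
# queue-ID key mapping to [environment, {task_id: scenario_path, ...}].
payload = {
    "source": "feature/example-branch",   # omitted -> defaults to "main"
    "QUEUE_001": [                        # any key other than "source" is treated as queue_id
        "qemu",                           # environment (data[0])
        {                                 # tasks_data (data[1])
            "TASK_001": "scenarios/example_boot.yaml",
            "TASK_002": "scenarios/example_sensor.yaml",
        },
    ],
}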
View File

@@ -12,6 +12,7 @@ class Queue(Base):
status = Column(String, default="Waiting") # Finished, Waiting, Running, Aborted
created_at = Column(DateTime, default=datetime.datetime.utcnow)
environment = Column(String)
source = Column(String) # Branch name
tasks = relationship("Task", back_populates="queue", cascade="all, delete-orphan")

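One deployment caveat: models.Base.metadata.create_all() creates missing tables but does not add new columns to tables that already exist, so a database created before this change needs a one-off migration for source. A minimal sketch, assuming SQLite and a table named queues (the real database URL and __tablename__ are not shown in this diff):

from sqlalchemy import create_engine, text

# Hypothetical one-off migration; adjust the URL and table name to the real ones.
engine = create_engine("sqlite:///./app.db")
with engine.begin() as conn:
    conn.execute(text("ALTER TABLE queues ADD COLUMN source VARCHAR"))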
View File

@@ -29,6 +29,25 @@ def update_json_status(queue_id, task_id, status, result=None):
with open(status_file, 'w') as f:
json.dump(data, f, indent=4)
def run_command_with_logging(cmd, log_file, cwd=None, env=None):
"""Runs a command, logs output to file and stdout."""
with open(log_file, "a") as f:
process = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
text=True,
cwd=cwd,
env=env
)
for line in process.stdout:
print(line, end="")
f.write(line)
f.flush()
process.wait()
return process.returncode
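# Example usage (hypothetical command and log path): the helper streams the
# command's combined stdout/stderr to both the console and the log file, then
# returns the exit code:
#   rc = run_command_with_logging("echo hello", "/tmp/example.log")
#   if rc != 0:
#       print(f"command failed with exit code {rc}")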
def run_worker():
print("Worker started...")
while True:
@@ -38,11 +57,28 @@ def run_worker():
queue = db.query(models.Queue).filter(models.Queue.status == "Waiting").order_by(models.Queue.created_at).first()
if queue:
print(f"Processing queue: {queue.id}")
print(f"Processing queue: {queue.id} (Branch: {queue.source})")
queue.status = "Running"
update_json_status(queue.id, None, "Running")
db.commit()
queue_dir = os.path.join(BASE_DATA_DIR, queue.id)
os.makedirs(queue_dir, exist_ok=True)
queue_log = os.path.join(queue_dir, "queue_log.txt")
# 0- Checkout branch
print(f"Checking out branch: {queue.source}")
checkout_cmd = f"./TPF/gitea_repo_controller.sh checkout {queue.source}"
run_command_with_logging(checkout_cmd, queue_log)
# 1-5 Build software
print("Building software...")
# Source the IDF export script and build in a single shell invocation so the
# exported environment carries through to idf.py.
build_cmd = "bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
run_command_with_logging(build_cmd, queue_log)
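# Note: the return codes of the checkout and build commands above are not
# checked; a defensive sketch would abort the queue when the build fails, e.g.:
#   if run_command_with_logging(build_cmd, queue_log) != 0:
#       queue.status = "Aborted"
#       update_json_status(queue.id, None, "Aborted")
#       db.commit()
#       continue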
# 9- Loop for each task
tasks = db.query(models.Task).filter(models.Task.queue_id == queue.id, models.Task.status == "Waiting").all()
for task in tasks:
@@ -57,23 +93,33 @@ def run_worker():
db.commit()
try:
# Run tpf_execution.py [queue_id, scenario_path, task_id]
# Assuming tpf_execution.py is in the parent directory or accessible
script_path = "tpf_execution.py"
# For testing, let's assume it's in the same dir as the app or parent
cmd = ["python", script_path, queue.id, task.scenario_path, task.id]
# Run scenario_execution.py queue_id scenario_path task_id
# scenario_execution.py is deployed to TPF/ by the setup script
script_path = "./TPF/scenario_execution.py"
# Use the same python environment
cmd = f"python3 {script_path} {queue.id} {task.scenario_path} {task.id}"
result = subprocess.run(cmd, capture_output=True, text=True)
# We want to capture this in the task log too
task_dir = os.path.join(queue_dir, task.id)
os.makedirs(task_dir, exist_ok=True)
task_log = os.path.join(task_dir, f"{task.id}-logging.html")
# Parse result if it returns json
try:
execution_result = json.loads(result.stdout)
except json.JSONDecodeError:
execution_result = {"output": result.stdout, "error": result.stderr}
# For now, let's just log to stdout and the queue log
# scenario_execution.py already generates its own reports.
ret = run_command_with_logging(cmd, queue_log)
if ret == 0:
task.status = "Finished"
task.result = execution_result
update_json_status(queue.id, task.id, "Finished", execution_result)
else:
task.status = "Error"
# Try to find the summary if it exists
summary_path = os.path.join(task_dir, "final_summary.json")
if os.path.exists(summary_path):
with open(summary_path, 'r') as f:
task.result = json.load(f)
update_json_status(queue.id, task.id, task.status, task.result)
except Exception as e:
print(f"Error running task {task.id}: {e}")