Continue implementation

2025-12-27 20:01:45 +01:00
parent 79e4825b44
commit 58be2d600c
5 changed files with 85 additions and 21 deletions


@@ -29,6 +29,25 @@ def update_json_status(queue_id, task_id, status, result=None):
     with open(status_file, 'w') as f:
         json.dump(data, f, indent=4)
 
+def run_command_with_logging(cmd, log_file, cwd=None, env=None):
+    """Runs a command, logs output to file and stdout."""
+    with open(log_file, "a") as f:
+        process = subprocess.Popen(
+            cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            shell=True,
+            text=True,
+            cwd=cwd,
+            env=env
+        )
+        for line in process.stdout:
+            print(line, end="")
+            f.write(line)
+            f.flush()
+        process.wait()
+        return process.returncode
+
 def run_worker():
     print("Worker started...")
     while True:
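
For context, the new run_command_with_logging helper can be exercised on its own. A minimal usage sketch, assuming it is called from within this worker module; the command and log path are hypothetical:

    # Minimal usage sketch (hypothetical command and log path).
    # Streams combined stdout/stderr to the console, appends it to the
    # log file, and returns the command's exit code.
    ret = run_command_with_logging("echo hello", "/tmp/example.log")
    if ret != 0:
        print(f"Command failed with exit code {ret}")
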
@@ -38,11 +57,28 @@ def run_worker():
         queue = db.query(models.Queue).filter(models.Queue.status == "Waiting").order_by(models.Queue.created_at).first()
         if queue:
-            print(f"Processing queue: {queue.id}")
+            print(f"Processing queue: {queue.id} (Branch: {queue.source})")
             queue.status = "Running"
             update_json_status(queue.id, None, "Running")
             db.commit()
+
+            queue_dir = os.path.join(BASE_DATA_DIR, queue.id)
+            os.makedirs(queue_dir, exist_ok=True)
+            queue_log = os.path.join(queue_dir, "queue_log.txt")
+
+            # 0- Checkout branch
+            print(f"Checking out branch: {queue.source}")
+            checkout_cmd = f"./TPF/gitea_repo_controller.sh checkout {queue.source}"
+            run_command_with_logging(checkout_cmd, queue_log)
+
+            # 1-5 Build software
+            print("Building software...")
+            # We need to source the IDF export script and then build.
+            # Using a single shell command to maintain the environment.
+            build_cmd = "bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
+            run_command_with_logging(build_cmd, queue_log)
+
+            # 9- Loop for each task
             tasks = db.query(models.Task).filter(models.Task.queue_id == queue.id, models.Task.status == "Waiting").all()
             for task in tasks:
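
One note on this hunk: the exit codes of the checkout and build commands are discarded. A possible hardening, sketched here as an assumption and not part of this commit, would be to fail the queue early when either step breaks, reusing the status conventions already shown above:

    # Hedged sketch (not in this commit): abort on checkout/build failure.
    if run_command_with_logging(checkout_cmd, queue_log) != 0:
        queue.status = "Error"
        update_json_status(queue.id, None, "Error")
        db.commit()
        continue  # skip this queue and poll for the next one
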
@@ -57,23 +93,33 @@ def run_worker():
                 db.commit()
                 try:
-                    # Run tpf_execution.py [queue_id, scenario_path, task_id]
-                    # Assuming tpf_execution.py is in the parent directory or accessible
-                    script_path = "tpf_execution.py"
-                    # For testing, let's assume it's in the same dir as the app or parent
-                    cmd = ["python", script_path, queue.id, task.scenario_path, task.id]
-                    result = subprocess.run(cmd, capture_output=True, text=True)
+                    # Run scenario_execution.py queue_id scenario_path task_id
+                    # The user said it's in TPF/scenario_execution.py
+                    script_path = "./TPF/scenario_execution.py"
+                    # Use the same python environment
+                    cmd = f"python3 {script_path} {queue.id} {task.scenario_path} {task.id}"
+
+                    # We want to capture this in the task log too
                     task_dir = os.path.join(queue_dir, task.id)
                     os.makedirs(task_dir, exist_ok=True)
                     task_log = os.path.join(task_dir, f"{task.id}-logging.html")
-                    # Parse result if it returns json
-                    try:
-                        execution_result = json.loads(result.stdout)
-                    except:
-                        execution_result = {"output": result.stdout, "error": result.stderr}
-                    task.status = "Finished"
-                    task.result = execution_result
-                    update_json_status(queue.id, task.id, "Finished", execution_result)
+                    # For now, let's just log to stdout and the queue log
+                    # scenario_execution.py already generates its own reports.
+                    ret = run_command_with_logging(cmd, queue_log)
+                    if ret == 0:
+                        task.status = "Finished"
+                    else:
+                        task.status = "Error"
+
+                    # Try to find the summary if it exists
+                    summary_path = os.path.join(task_dir, "final_summary.json")
+                    if os.path.exists(summary_path):
+                        with open(summary_path, 'r') as f:
+                            task.result = json.load(f)
+                    update_json_status(queue.id, task.id, task.status, task.result)
                 except Exception as e:
                     print(f"Error running task {task.id}: {e}")