continue implementation
deploy.sh | 14 ++++++++++++--

diff --git a/deploy.sh b/deploy.sh
@@ -35,11 +35,21 @@ else
     echo "⚠️ Nginx configuration not found, skipping..."
 fi
 
-# 4. Create Data Directory
-echo "📁 Creating data directory..."
+# 4. Create Data and TPF Directories
+echo "📁 Creating directories..."
 mkdir -p /home/asf/testarena
+mkdir -p /home/asf/testarena_backend/TPF
 chown -R asf:asf /home/asf/testarena
+chown -R asf:asf /home/asf/testarena_backend/TPF
 chmod -R 755 /home/asf/testarena
+chmod -R 755 /home/asf/testarena_backend/TPF
 
+# Copy scripts to TPF
+cp gitea_repo_controller.sh /home/asf/testarena_backend/TPF/
+cp scenario_execution.py /home/asf/testarena_backend/TPF/
+cp scenario_exe_parser.py /home/asf/testarena_backend/TPF/
+cp test_execution.sh /home/asf/testarena_backend/TPF/
+chmod +x /home/asf/testarena_backend/TPF/*.sh
 
 # 5. Set up Systemd Services
 echo "⚙️ Setting up Systemd services..."
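
A quick way to check the layout deploy.sh now produces is to look for the four copied scripts. This is a minimal verification sketch, not part of the commit; the paths mirror the cp targets above:

    import os

    # Paths taken from the cp lines in deploy.sh above.
    TPF_DIR = "/home/asf/testarena_backend/TPF"
    EXPECTED = [
        "gitea_repo_controller.sh",
        "scenario_execution.py",
        "scenario_exe_parser.py",
        "test_execution.sh",
    ]

    missing = [name for name in EXPECTED
               if not os.path.isfile(os.path.join(TPF_DIR, name))]
    print("TPF OK" if not missing else f"missing: {missing}")
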
@@ -72,7 +72,10 @@ REPORT_TEMPLATE = """
 
 def run_test_suite(tasks):
     aggregated_results = {}
-    shell_script = "./TPF/test_execution.sh"
+    # Use path relative to this script
+    script_dir = os.path.dirname(os.path.abspath(__file__))
+    shell_script = os.path.join(script_dir, "test_execution.sh")
 
     if os.name != 'nt':
         subprocess.run(["chmod", "+x", shell_script])
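
The motivation for this hunk: "./TPF/test_execution.sh" resolves against the current working directory, which changes depending on whether the script is launched by hand, by the worker, or by a systemd unit, while the __file__-based form always points next to the script itself — which matches deploy.sh placing both files in the same TPF directory. An illustrative sketch of the difference:

    import os

    # CWD-relative: result depends on where the process was started.
    cwd_relative = os.path.abspath("./TPF/test_execution.sh")

    # Script-relative: stable regardless of the launch directory.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    script_relative = os.path.join(script_dir, "test_execution.sh")

    print(cwd_relative)
    print(script_relative)
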
@@ -31,10 +31,13 @@ models.Base.metadata.create_all(bind=database.engine)
 @app.post("/api/queue")
 async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
     """
-    Input json contain {<queue_ID> :[environment, "<TASK_ID>" : "<path to scenario>],}
+    Input json contain {"source": "<branch_name>", <queue_ID> :[environment, {"<TASK_ID>" : "<path to scenario>"},]}
     """
     try:
-        queue_id = list(payload.keys())[0]
+        source = payload.get("source", "main")
+        # Find the queue_id key (it's the one that isn't "source")
+        queue_id = next(k for k in payload.keys() if k != "source")
+
         data = payload[queue_id]
         environment = data[0]
         tasks_data = data[1]  # This is a dict {"TASK_ID": "path"}
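
For reference, a request body matching the new docstring could look like the sketch below; the branch, queue and task IDs are hypothetical. Note the lookup assumes exactly one key besides "source"; with extra keys, next() would pick whichever comes first.

    payload = {
        "source": "feature/sensor-fix",              # hypothetical branch name
        "QUEUE_42": [                                 # hypothetical queue_ID
            "qemu",                                   # environment
            {"TASK_1": "scenarios/basic_boot.txt"},   # hypothetical task -> scenario path
        ],
    }

    source = payload.get("source", "main")
    queue_id = next(k for k in payload.keys() if k != "source")
    environment, tasks_data = payload[queue_id]
    # queue_id == "QUEUE_42", environment == "qemu"
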
@@ -47,12 +50,13 @@ async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
         status_file = os.path.join(queue_dir, "queue_status.json")
         queue_status = {
             "queue_id": queue_id,
+            "source": source,
             "status": "Waiting",
             "tasks": {}
         }
 
         # 3. Save to database and prepare status file
-        new_queue = models.Queue(id=queue_id, environment=environment, status="Waiting")
+        new_queue = models.Queue(id=queue_id, environment=environment, source=source, status="Waiting")
         db.add(new_queue)
 
         for task_id, scenario_path in tasks_data.items():
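
With a payload like the hypothetical one sketched above, the freshly initialized queue_status.json would contain roughly the following (before the loop over tasks_data populates "tasks"):

    {
        "queue_id": "QUEUE_42",
        "source": "feature/sensor-fix",
        "status": "Waiting",
        "tasks": {}
    }
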
@@ -12,6 +12,7 @@ class Queue(Base):
     status = Column(String, default="Waiting")  # Finished, Waiting, Running, Aborted
     created_at = Column(DateTime, default=datetime.datetime.utcnow)
     environment = Column(String)
+    source = Column(String)  # Branch name
 
     tasks = relationship("Task", back_populates="queue", cascade="all, delete-orphan")
 
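
One caveat: models.Base.metadata.create_all(bind=database.engine) only creates tables that don't exist yet; it will not add the new source column to an already-populated database. A one-off migration is needed there. A minimal sketch, assuming the model's table is named "queues" (check the actual __tablename__) and a SQLAlchemy 1.4+ engine:

    from sqlalchemy import text

    import database  # the project's database module, as used in main.py

    # Add the column in place; existing rows get NULL for source.
    with database.engine.begin() as conn:
        conn.execute(text("ALTER TABLE queues ADD COLUMN source VARCHAR"))
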
@@ -29,6 +29,25 @@ def update_json_status(queue_id, task_id, status, result=None):
     with open(status_file, 'w') as f:
         json.dump(data, f, indent=4)
+
+def run_command_with_logging(cmd, log_file, cwd=None, env=None):
+    """Runs a command, logs output to file and stdout."""
+    with open(log_file, "a") as f:
+        process = subprocess.Popen(
+            cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            shell=True,
+            text=True,
+            cwd=cwd,
+            env=env
+        )
+        for line in process.stdout:
+            print(line, end="")
+            f.write(line)
+            f.flush()
+        process.wait()
+        return process.returncode
 
 def run_worker():
     print("Worker started...")
     while True:
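
Usage of the new helper, for reference: because it passes shell=True, cmd is a single shell string, and stderr=subprocess.STDOUT folds error output into the same stream, so everything lands in one log. A small standalone example, assuming the helper is in scope and using a hypothetical log path:

    # Streams output line by line to stdout and appends it to the log file;
    # returns the process exit code (non-zero here, because ls fails).
    rc = run_command_with_logging("echo hello && ls /nonexistent", "/tmp/demo.log")
    print(f"exit code: {rc}")
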
@@ -38,11 +57,28 @@ def run_worker():
         queue = db.query(models.Queue).filter(models.Queue.status == "Waiting").order_by(models.Queue.created_at).first()
 
         if queue:
-            print(f"Processing queue: {queue.id}")
+            print(f"Processing queue: {queue.id} (Branch: {queue.source})")
             queue.status = "Running"
             update_json_status(queue.id, None, "Running")
             db.commit()
 
+            queue_dir = os.path.join(BASE_DATA_DIR, queue.id)
+            os.makedirs(queue_dir, exist_ok=True)
+            queue_log = os.path.join(queue_dir, "queue_log.txt")
+
+            # 0- Checkout branch
+            print(f"Checking out branch: {queue.source}")
+            checkout_cmd = f"./TPF/gitea_repo_controller.sh checkout {queue.source}"
+            run_command_with_logging(checkout_cmd, queue_log)
+
+            # 1-5 Build software
+            print("Building software...")
+            # We need to source the IDF export script and then build.
+            # Using a single shell command to maintain environment.
+            build_cmd = f"bash -c 'source $HOME/esp/esp-idf/export.sh && cd TPF/Sensor_hub_repo && idf.py build && idf.py qemu'"
+            run_command_with_logging(build_cmd, queue_log)
+
+            # 9- Loop for each task
             tasks = db.query(models.Task).filter(models.Task.queue_id == queue.id, models.Task.status == "Waiting").all()
 
             for task in tasks:
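
Both run_command_with_logging calls above drop the return code, so a failed checkout or build still falls through to the task loop. If that is not intended, a small guard could abort the queue early; a sketch only, reusing the existing helpers ("Aborted" is one of the status values listed in models.py):

    ret = run_command_with_logging(checkout_cmd, queue_log)
    if ret != 0:
        queue.status = "Aborted"
        update_json_status(queue.id, None, "Aborted")
        db.commit()
        continue  # give up on this queue, poll for the next one
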
@@ -57,23 +93,33 @@ def run_worker():
                 db.commit()
 
                 try:
-                    # Run tpf_execution.py [queue_id, scenario_path, task_id]
-                    # Assuming tpf_execution.py is in the parent directory or accessible
-                    script_path = "tpf_execution.py"
-                    # For testing, let's assume it's in the same dir as the app or parent
-                    cmd = ["python", script_path, queue.id, task.scenario_path, task.id]
+                    # Run scenario_execution.py queue_id scenario_path task_id
+                    # The user said it's in TPF/scenario_execution.py
+                    script_path = "./TPF/scenario_execution.py"
+                    # Use the same python environment
+                    cmd = f"python3 {script_path} {queue.id} {task.scenario_path} {task.id}"
 
-                    result = subprocess.run(cmd, capture_output=True, text=True)
+                    # We want to capture this in the task log too
+                    task_dir = os.path.join(queue_dir, task.id)
+                    os.makedirs(task_dir, exist_ok=True)
+                    task_log = os.path.join(task_dir, f"{task.id}-logging.html")
 
-                    # Parse result if it returns json
-                    try:
-                        execution_result = json.loads(result.stdout)
-                    except:
-                        execution_result = {"output": result.stdout, "error": result.stderr}
+                    # For now, let's just log to stdout and the queue log
+                    # scenario_execution.py already generates its own reports.
+                    ret = run_command_with_logging(cmd, queue_log)
 
-                    task.status = "Finished"
-                    task.result = execution_result
-                    update_json_status(queue.id, task.id, "Finished", execution_result)
+                    if ret == 0:
+                        task.status = "Finished"
+                    else:
+                        task.status = "Error"
+
+                    # Try to find the summary if it exists
+                    summary_path = os.path.join(task_dir, "final_summary.json")
+                    if os.path.exists(summary_path):
+                        with open(summary_path, 'r') as f:
+                            task.result = json.load(f)
+
+                    update_json_status(queue.id, task.id, task.status, task.result)
 
                 except Exception as e:
                     print(f"Error running task {task.id}: {e}")
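
Since cmd is now a single string executed through the shell, a scenario_path containing spaces or shell metacharacters would break the invocation. Quoting each argument keeps the command identical for benign inputs; a minimal hardening sketch:

    import shlex

    cmd = " ".join([
        "python3",
        shlex.quote(script_path),
        shlex.quote(str(queue.id)),
        shlex.quote(str(task.scenario_path)),
        shlex.quote(str(task.id)),
    ])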