diff --git a/asf-pc-server/testarena_pc_backend/deploy.sh b/asf-pc-server/testarena_pc_backend/deploy.sh
new file mode 100644
index 0000000..5cb5f37
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/deploy.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+# TestArena Deployment Script
+# Run this script with sudo: sudo ./deploy.sh
+
+set -e
+
+echo "🚀 Starting TestArena Deployment..."
+
+# 1. Install Dependencies
+echo "📦 Installing dependencies..."
+apt-get update
+apt-get install -y nginx python3-pip python3-venv
+
+# 2. Set up Python Virtual Environment
+echo "🐍 Setting up Python environment..."
+python3 -m venv venv
+source venv/bin/activate
+pip install fastapi uvicorn sqlalchemy
+
+# 3. Configure Nginx
+echo "🌐 Configuring Nginx..."
+cp nginx/testarena.conf /etc/nginx/sites-available/testarena
+ln -sf /etc/nginx/sites-available/testarena /etc/nginx/sites-enabled/
+rm -f /etc/nginx/sites-enabled/default
+
+# 4. Create Data Directory
+echo "📁 Creating data directory..."
+mkdir -p /home/asf/testarena
+chown -R asf:asf /home/asf/testarena
+chmod -R 755 /home/asf/testarena
+
+# 5. Restart Nginx
+echo "🔄 Restarting Nginx..."
+nginx -t
+systemctl restart nginx
+
+echo "✅ Deployment complete!"
+echo "--------------------------------------------------"
+echo "Dashboard: http://asf-server.duckdns.org:8080/"
+echo "Results: http://asf-server.duckdns.org:8080/results/"
+echo "--------------------------------------------------"
+echo "To start the app: source venv/bin/activate && uvicorn testarena_app.main:app --host 0.0.0.0 --port 8000"
+echo "To start the worker: source venv/bin/activate && python3 -m testarena_app.worker"
diff --git a/asf-pc-server/testarena_pc_backend/deployment_guide.md b/asf-pc-server/testarena_pc_backend/deployment_guide.md
new file mode 100644
index 0000000..cb22f05
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/deployment_guide.md
@@ -0,0 +1,80 @@
+# TestArena Deployment & Testing Guide
+
+This guide explains how to deploy and test the TestArena backend application on your Ubuntu server.
+
+## 🚀 Deployment Steps
+
+### 1. Clone the Repository
+Ensure you have the code on your server in a directory like `/home/asf/testarena_pc_backend`.
+
+### 2. Run the Deployment Script
+The deployment script automates Nginx configuration and dependency installation.
+```bash
+sudo chmod +x deploy.sh
+sudo ./deploy.sh
+```
+
+### 3. Start the Application Services
+Run these in the background, or under a process manager such as `systemd` or `pm2`.
+
+**Start the API Server:**
+```bash
+source venv/bin/activate
+uvicorn testarena_app.main:app --host 0.0.0.0 --port 8000
+```
+
+**Start the Background Worker:**
+```bash
+source venv/bin/activate
+python3 -m testarena_app.worker
+```
+
+---
+
+## 🧪 Testing the System
+
+### 1. Verify Dashboard Access
+Open your browser and navigate to:
+`http://asf-server.duckdns.org:8080/`
+You should see the modern, colorful TestArena dashboard.
+
+### 2. Verify Results Browsing
+Navigate to:
+`http://asf-server.duckdns.org:8080/results/`
+You should see an automatic directory listing of `/home/asf/testarena/`.
+
+### 3. Test the Queue API
+Run the following `curl` command to queue a test task:
+```bash
+curl -X POST http://asf-server.duckdns.org:8080/api/queue \
+-H "Content-Type: application/json" \
+-d '{
+    "test_queue_001": [
+        "staging",
+        {
+            "task_1": "/home/asf/scenarios/test1.py",
+            "task_2": "/home/asf/scenarios/test2.py"
+        }
+    ]
+}'
+```
+
+### 4. Verify Worker Execution
+- Check the dashboard; you should see the new queue appear and its status change from `Waiting` to `Running` and then `Finished`.
+- Check the filesystem:
+  ```bash
+  ls -R /home/asf/testarena/test_queue_001
+  ```
+  You should see `queue_status.json` and any results generated by `tpf_execution.py`.
+
+### 5. Test Aborting a Queue
+Queue another task and click the **Abort** button on the dashboard. Verify that the status changes to `Aborted` in both the dashboard and the `queue_status.json` file.
+
+---
+
+## 🛠️ Troubleshooting
+
+- **Nginx Errors**: Check logs with `sudo tail -f /var/log/nginx/error.log`.
+- **FastAPI Errors**: Check the terminal where `uvicorn` is running.
+- **Permission Issues**: Ensure `/home/asf/testarena` is writable by the user running the app.
+- **Port 8080 Blocked**: Ensure your firewall (ufw) allows traffic on port 8080: `sudo ufw allow 8080`.
diff --git a/asf-pc-server/testarena_pc_backend/nginx/testarena.conf b/asf-pc-server/testarena_pc_backend/nginx/testarena.conf
new file mode 100644
index 0000000..e6cbd39
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/nginx/testarena.conf
@@ -0,0 +1,58 @@
+# TestArena Nginx Configuration
+# This file should be placed in /etc/nginx/sites-available/testarena
+# and symlinked to /etc/nginx/sites-enabled/testarena
+
+server {
+    listen 8080;
+    server_name _;
+
+    # Security: Prevent directory traversal and restrict symlinks
+    disable_symlinks on;
+
+    # Root directory for the results (autoindex)
+    location /results/ {
+        alias /home/asf/testarena/;
+
+        # Enable autoindex with requested features
+        autoindex on;
+        autoindex_exact_size off;  # Human-readable sizes
+        autoindex_localtime on;    # Local time
+
+        # Read-only access
+        limit_except GET {
+            deny all;
+        }
+
+        # Prevent execution of scripts
+        location ~* \.(php|pl|py|sh|cgi)$ {
+            return 403;
+        }
+    }
+
+    # Proxy requests to the FastAPI application
+    location / {
+        proxy_pass http://127.0.0.1:8000;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+
+        # WebSocket support (if needed in future)
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection "upgrade";
+    }
+
+    # Custom error pages
+    error_page 404 /404.html;
+    location = /404.html {
+        root /usr/share/nginx/html;
+        internal;
+    }
+
+    error_page 500 502 503 504 /50x.html;
+    location = /50x.html {
+        root /usr/share/nginx/html;
+        internal;
+    }
+}
diff --git a/asf-pc-server/testarena_pc_backend/testarena_app/database.py b/asf-pc-server/testarena_pc_backend/testarena_app/database.py
new file mode 100644
index 0000000..131e2ea
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/testarena_app/database.py
@@ -0,0 +1,16 @@
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+import os
+
+# Using SQLite for simplicity as requested; keep the DB file next to this module
+DATABASE_URL = "sqlite:///" + os.path.join(os.path.dirname(__file__), "testarena.db")
+
+engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False})
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+def get_db():
+    db = SessionLocal()
+    try:
+        yield db
+    finally:
+        db.close()
diff --git a/asf-pc-server/testarena_pc_backend/testarena_app/main.py b/asf-pc-server/testarena_pc_backend/testarena_app/main.py
new file mode 100644
index 0000000..d0af081
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/testarena_app/main.py
@@ -0,0 +1,139 @@
+from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks
+from fastapi.staticfiles import StaticFiles
+from fastapi.responses import FileResponse
+from sqlalchemy.orm import Session
+import os
+import json
+import uuid
+from typing import Dict, List
+from . import models, database
+
+app = FastAPI(title="TestArena API")
+
+# Mount static files
+static_dir = os.path.join(os.path.dirname(__file__), "static")
+os.makedirs(static_dir, exist_ok=True)
+app.mount("/static", StaticFiles(directory=static_dir), name="static")
+
+# Base directory for data as requested
+BASE_DATA_DIR = "/home/asf/testarena"
+# For local development on Windows, fall back to a local data directory
+if os.name == 'nt':
+    BASE_DATA_DIR = "d:/ASF - course/ASF_01/ASF_tools/asf-pc-server/testarena_pc_backend/testarena_data"
+
+# Ensure base directory exists
+os.makedirs(BASE_DATA_DIR, exist_ok=True)
+
+# Initialize database
+models.Base.metadata.create_all(bind=database.engine)
+
+@app.post("/api/queue")
+async def queue_task(payload: Dict, db: Session = Depends(database.get_db)):
+    """
+    Expected payload: {"<queue_id>": ["<environment>", {"<task_id>": "<scenario_path>", ...}]}
+    """
+    try:
+        queue_id = list(payload.keys())[0]
+        data = payload[queue_id]
+        environment = data[0]
+        tasks_data = data[1]  # This is a dict {"TASK_ID": "path"}
+
+        # 1. Create folder
+        queue_dir = os.path.join(BASE_DATA_DIR, queue_id)
+        os.makedirs(queue_dir, exist_ok=True)
+
+        # 2. Create queue_status.json
+        status_file = os.path.join(queue_dir, "queue_status.json")
+        queue_status = {
+            "queue_id": queue_id,
+            "status": "Waiting",
+            "tasks": {}
+        }
+
+        # 3. Save to database and prepare status file
+        new_queue = models.Queue(id=queue_id, environment=environment, status="Waiting")
+        db.add(new_queue)
+
+        for task_id, scenario_path in tasks_data.items():
+            new_task = models.Task(id=task_id, queue_id=queue_id, scenario_path=scenario_path, status="Waiting")
+            db.add(new_task)
+            queue_status["tasks"][task_id] = "Waiting"
+
+        with open(status_file, 'w') as f:
+            json.dump(queue_status, f, indent=4)
+
+        db.commit()
+        return {"status": "Queue OK", "queue_id": queue_id}
+    except Exception as e:
+        return {"status": "Error", "message": str(e)}
+
+@app.get("/api/status/{id}")
+async def get_status(id: str, db: Session = Depends(database.get_db)):
+    # Check if it's a queue ID
+    queue = db.query(models.Queue).filter(models.Queue.id == id).first()
+    if queue:
+        return {"id": id, "type": "queue", "status": queue.status}
+
+    # Check if it's a task ID
+    task = db.query(models.Task).filter(models.Task.id == id).first()
+    if task:
+        return {"id": id, "type": "task", "status": task.status}
+
+    raise HTTPException(status_code=404, detail="ID not found")
+
+@app.post("/api/abort/{id}")
+async def abort_task(id: str, db: Session = Depends(database.get_db)):
+    # Abort queue
+    queue = db.query(models.Queue).filter(models.Queue.id == id).first()
+    if queue:
+        queue.status = "Aborted"
+        # Abort all tasks in queue
+        tasks = db.query(models.Task).filter(models.Task.queue_id == id).all()
+        for t in tasks:
+            if t.status in ["Waiting", "Running"]:
+                t.status = "Aborted"
+
+        # Update queue_status.json
+        queue_dir = os.path.join(BASE_DATA_DIR, id)
+        status_file = os.path.join(queue_dir, "queue_status.json")
+        if os.path.exists(status_file):
+            with open(status_file, 'r') as f:
+                data = json.load(f)
+            data["status"] = "Aborted"
+            for tid in data["tasks"]:
+                if data["tasks"][tid] in ["Waiting", "Running"]:
["Waiting", "Running"]: + data["tasks"][tid] = "Aborted" + with open(status_file, 'w') as f: + json.dump(data, f, indent=4) + + db.commit() + return {"id": id, "status": "Aborted"} + + # Abort single task + task = db.query(models.Task).filter(models.Task.id == id).first() + if task: + task.status = "Aborted" + # Update queue_status.json + queue_dir = os.path.join(BASE_DATA_DIR, task.queue_id) + status_file = os.path.join(queue_dir, "queue_status.json") + if os.path.exists(status_file): + with open(status_file, 'r') as f: + data = json.load(f) + data["tasks"][id] = "Aborted" + with open(status_file, 'w') as f: + json.dump(data, f, indent=4) + + db.commit() + return {"id": id, "status": "Aborted"} + + raise HTTPException(status_code=404, detail="ID not found") + +@app.get("/api/queues") +async def list_queues(db: Session = Depends(database.get_db)): + queues = db.query(models.Queue).order_by(models.Queue.created_at.desc()).all() + return queues + +@app.get("/") +async def root(): + return FileResponse(os.path.join(static_dir, "index.html")) diff --git a/asf-pc-server/testarena_pc_backend/testarena_app/models.py b/asf-pc-server/testarena_pc_backend/testarena_app/models.py new file mode 100644 index 0000000..c36ffc4 --- /dev/null +++ b/asf-pc-server/testarena_pc_backend/testarena_app/models.py @@ -0,0 +1,27 @@ +from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, JSON +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship +import datetime + +Base = declarative_base() + +class Queue(Base): + __tablename__ = "queues" + + id = Column(String, primary_key=True, index=True) + status = Column(String, default="Waiting") # Finished, Waiting, Running, Aborted + created_at = Column(DateTime, default=datetime.datetime.utcnow) + environment = Column(String) + + tasks = relationship("Task", back_populates="queue", cascade="all, delete-orphan") + +class Task(Base): + __tablename__ = "tasks" + + id = Column(String, primary_key=True, index=True) + queue_id = Column(String, ForeignKey("queues.id")) + scenario_path = Column(String) + status = Column(String, default="Waiting") # Finished, Waiting, Running, Aborted + result = Column(JSON, nullable=True) + + queue = relationship("Queue", back_populates="tasks") diff --git a/asf-pc-server/testarena_pc_backend/testarena_app/static/index.html b/asf-pc-server/testarena_pc_backend/testarena_app/static/index.html new file mode 100644 index 0000000..ffb34c6 --- /dev/null +++ b/asf-pc-server/testarena_pc_backend/testarena_app/static/index.html @@ -0,0 +1,407 @@ + + + + + + + TestArena | Modern Dashboard + + + + + +
diff --git a/asf-pc-server/testarena_pc_backend/testarena_app/static/index.html b/asf-pc-server/testarena_pc_backend/testarena_app/static/index.html
new file mode 100644
index 0000000..ffb34c6
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/testarena_app/static/index.html
@@ -0,0 +1,407 @@
[407-line single-page dashboard; only text fragments of the markup survived extraction, so the hunk is summarised rather than reproduced. Recoverable structure: a page titled "TestArena | Modern Dashboard" with a connection-status indicator ("Connecting..."), a "Queue Monitor" table (columns: Queue ID, Environment, Status, Actions), and a "Live System Logs" panel seeded with "23:34:52 System initialized. Waiting for connection...".]
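Because the dashboard markup is only summarised above, here is a sketch (not part of the diff) of the two API calls the Queue Monitor table is expected to drive: `GET /api/queues` for the table rows and `POST /api/abort/{id}` for the Abort button. It assumes the `requests` package and the same host and example queue ID as the guide.

```python
"""Sketch of the API calls behind the dashboard's Queue Monitor table (assumes `requests`)."""
import requests

BASE_URL = "http://asf-server.duckdns.org:8080"  # host taken from the deployment guide

# Table rows: every queue with its environment and current status.
for queue in requests.get(f"{BASE_URL}/api/queues", timeout=10).json():
    print(queue["id"], queue["environment"], queue["status"])

# Abort button: mark a queue (and its Waiting/Running tasks) as Aborted.
resp = requests.post(f"{BASE_URL}/api/abort/test_queue_001", timeout=10)
print(resp.json())  # e.g. {"id": "test_queue_001", "status": "Aborted"}
```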
diff --git a/asf-pc-server/testarena_pc_backend/testarena_app/worker.py b/asf-pc-server/testarena_pc_backend/testarena_app/worker.py
new file mode 100644
index 0000000..d9805ce
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/testarena_app/worker.py
@@ -0,0 +1,98 @@
+import time
+import subprocess
+import json
+import os
+import sys
+from sqlalchemy.orm import Session
+from . import models, database
+
+# Base directory for data
+BASE_DATA_DIR = "/home/asf/testarena"
+if os.name == 'nt':
+    BASE_DATA_DIR = "d:/ASF - course/ASF_01/ASF_tools/asf-pc-server/testarena_pc_backend/testarena_data"
+
+def update_json_status(queue_id, task_id, status, result=None):
+    queue_dir = os.path.join(BASE_DATA_DIR, queue_id)
+    status_file = os.path.join(queue_dir, "queue_status.json")
+    if os.path.exists(status_file):
+        with open(status_file, 'r') as f:
+            data = json.load(f)
+
+        if task_id:
+            data["tasks"][task_id] = status
+        else:
+            data["status"] = status
+
+        if result:
+            data["results"] = data.get("results", {})
+            data["results"][task_id] = result
+
+        with open(status_file, 'w') as f:
+            json.dump(data, f, indent=4)
+
+def run_worker():
+    print("Worker started...")
+    while True:
+        db = database.SessionLocal()
+        try:
+            # Get next waiting queue
+            queue = db.query(models.Queue).filter(models.Queue.status == "Waiting").order_by(models.Queue.created_at).first()
+
+            if queue:
+                print(f"Processing queue: {queue.id}")
+                queue.status = "Running"
+                update_json_status(queue.id, None, "Running")
+                db.commit()
+
+                tasks = db.query(models.Task).filter(models.Task.queue_id == queue.id, models.Task.status == "Waiting").all()
+
+                for task in tasks:
+                    # Check if queue was aborted mid-way
+                    db.refresh(queue)
+                    if queue.status == "Aborted":
+                        break
+
+                    print(f"Running task: {task.id}")
+                    task.status = "Running"
+                    update_json_status(queue.id, task.id, "Running")
+                    db.commit()
+
+                    try:
+                        # Run tpf_execution.py <queue_id> <scenario_path> <task_id>;
+                        # the script lives one level above this package (the repo root)
+                        script_path = os.path.join(
+                            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+                            "tpf_execution.py")
+                        cmd = [sys.executable, script_path, queue.id, task.scenario_path, task.id]
+
+                        result = subprocess.run(cmd, capture_output=True, text=True)
+
+                        # tpf_execution.py prints progress lines before the final JSON result,
+                        # so parse only the last stdout line
+                        try:
+                            execution_result = json.loads(result.stdout.strip().splitlines()[-1])
+                        except (json.JSONDecodeError, IndexError):
+                            execution_result = {"output": result.stdout, "error": result.stderr}
+
+                        task.status = "Finished"
+                        task.result = execution_result
+                        update_json_status(queue.id, task.id, "Finished", execution_result)
+
+                    except Exception as e:
+                        print(f"Error running task {task.id}: {e}")
+                        task.status = "Error"
+                        update_json_status(queue.id, task.id, "Error")
+
+                    db.commit()
+
+                if queue.status != "Aborted":
+                    queue.status = "Finished"
+                    update_json_status(queue.id, None, "Finished")
+                    db.commit()
+
+            time.sleep(5)  # Poll every 5 seconds
+        except Exception as e:
+            print(f"Worker error: {e}")
+            time.sleep(10)
+        finally:
+            db.close()
+
+if __name__ == "__main__":
+    run_worker()
diff --git a/asf-pc-server/testarena_pc_backend/tpf_execution.py b/asf-pc-server/testarena_pc_backend/tpf_execution.py
new file mode 100644
index 0000000..9ede2cc
--- /dev/null
+++ b/asf-pc-server/testarena_pc_backend/tpf_execution.py
@@ -0,0 +1,32 @@
+import sys
+import json
+import time
+import random
+
+def main():
+    if len(sys.argv) < 4:
+        print("Usage: python tpf_execution.py <queue_id> <scenario_path> <task_id>")
+        sys.exit(1)
+
+    queue_id = sys.argv[1]
+    scenario_path = sys.argv[2]
+    task_id = sys.argv[3]
+
print(f"Starting execution for Task: {task_id} in Queue: {queue_id}") + print(f"Scenario: {scenario_path}") + + # Simulate work + duration = random.randint(2, 5) + time.sleep(duration) + + result = { + "task_id": task_id, + "status": "Success", + "duration": duration, + "details": f"Scenario {scenario_path} executed successfully." + } + + print(json.dumps(result)) + +if __name__ == "__main__": + main()