from flask import Blueprint, request, jsonify
from app.models import User, Job
from app import db
import json
import requests
import datetime

api_bp = Blueprint('api', __name__, url_prefix='/api')


@api_bp.route('/submit_job', methods=['POST'])
def submit_job():
    data = request.get_json()
    if not data:
        return jsonify({'error': 'No JSON data provided'}), 400

    username = data.get('username')
    password = data.get('password')
    branch_name = data.get('branch_name')
    scenarios = data.get('scenarios')

    # Validation
    if not all([username, password, branch_name, scenarios]):
        return jsonify({'error': 'Missing required fields: username, password, branch_name, scenarios'}), 400

    if not isinstance(scenarios, list) or not scenarios:
        return jsonify({'error': 'Scenarios must be a non-empty list'}), 400

    # Authentication
    user = User.query.filter_by(username=username).first()
    if not user or not user.check_password(password):
        return jsonify({'error': 'Invalid credentials'}), 401

    try:
        # Create the job with default environment/test mode; commit first so
        # job.id is assigned before the remote identifiers are built.
        job = Job(
            user_id=user.id,
            branch_name=branch_name,
            scenarios=json.dumps(scenarios),
            environment='staging',   # Default
            test_mode='simulator',   # Default
            status='waiting'
        )
        db.session.add(job)
        db.session.commit()

        # Prepare remote trigger identifiers.
        remote_queue_id = str(job.id)
        remote_task_ids = {s: f"{job.id}_{i+1}" for i, s in enumerate(scenarios)}

        job.remote_queue_id = remote_queue_id
        job.remote_task_ids = json.dumps(remote_task_ids)
        db.session.commit()

        # Payload for the remote queue. Unlike jobs.py, which maps each scenario
        # name to a path ({remote_task_ids[s]: scenario_map.get(s, s) for s in scenarios}),
        # this API has no scenario map, so the scenario name itself is sent as the
        # value and the remote side is expected to resolve it.
        payload = {
            "source": branch_name,
            remote_queue_id: [
                'staging',
                {remote_task_ids[s]: s for s in scenarios}
            ]
        }

        # Trigger the remote queue.
        remote_url = "http://asf-server.duckdns.org:8080/api/queue"
        try:
            response = requests.post(remote_url, json=payload, timeout=10)
            response.raise_for_status()
            remote_triggered = True
        except Exception as e:
            print(f"[ERROR] Failed to trigger remote queue from API: {e}")
            remote_triggered = False
            job.queue_log = f"[SYSTEM] Failed to trigger remote queue: {str(e)}"
            db.session.commit()

        return jsonify({
            'success': True,
            'job_id': job.id,
            'status': job.status,
            'remote_triggered': remote_triggered,
            'message': 'Job submitted successfully'
        })

    except Exception as e:
        return jsonify({'error': f'Internal server error: {str(e)}'}), 500
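
# Example request/response for POST /api/submit_job (illustrative only; the field
# names match the handler above, the values are made up):
#
#   POST /api/submit_job
#   {
#       "username": "ci-bot",
#       "password": "********",
#       "branch_name": "feature/scenario-refresh",
#       "scenarios": ["login_smoke", "checkout_basic"]
#   }
#
#   200 OK
#   {
#       "success": true,
#       "job_id": 42,
#       "status": "waiting",
#       "remote_triggered": true,
#       "message": "Job submitted successfully"
#   }
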
@api_bp.route('/job/<int:job_id>', methods=['GET'])
def get_job_status(job_id):
    try:
        job = Job.query.get(job_id)
        if not job:
            return jsonify({'error': 'Job not found'}), 404

        # Optionally trigger an internal status update so the response reflects
        # the latest remote data. The helper lives in jobs.py; importing it at
        # module level could cause a circular import, so import it here and fall
        # back to the stored status (kept fresh by the background poller) if the
        # update fails.
        try:
            from app.routes.jobs import update_job_status_internal
            update_job_status_internal(job)
        except Exception as e:
            print(f"[WARNING] Failed to trigger internal status update: {e}")

        return jsonify({
            'job_id': job.id,
            'status': job.status,
            'branch_name': job.branch_name,
            'scenarios': json.loads(job.scenarios) if job.scenarios else [],
            'remote_results': json.loads(job.remote_results) if job.remote_results else {},
            'created_at': job.submitted_at.isoformat() if job.submitted_at else None,
            'completed_at': job.completed_at.isoformat() if job.completed_at else None,
            'remote_queue_id': job.remote_queue_id
        })

    except Exception as e:
        return jsonify({'error': f'Internal server error: {str(e)}'}), 500
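
# Minimal client sketch for the two endpoints above (a sketch only: the base URL,
# credentials, and scenario names are assumptions; adjust them for your deployment):
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:5000/api/submit_job",
#       json={
#           "username": "ci-bot",
#           "password": "********",
#           "branch_name": "feature/scenario-refresh",
#           "scenarios": ["login_smoke"],
#       },
#       timeout=10,
#   )
#   job_id = resp.json()["job_id"]
#
#   status = requests.get(
#       f"http://localhost:5000/api/job/{job_id}", timeout=10
#   ).json()["status"]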