
Introduced a `processing` router to handle job streaming using server-sent events. Added `Jobs` and `JobStatus` models for managing job-related data, along with database creation logic. Updated the `sample` router to create new job entries during experiment creation.
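
The diff itself does not include the model definitions. Given the fields the streaming code touches (`JobStatus.TODO`, `Jobs.parameters`), they presumably resemble the following minimal SQLAlchemy sketch; the column types and the non-TODO status values are assumptions, not the actual schema:

    # Minimal sketch of the new models, inferred from the streaming code;
    # the real schema may differ, and RUNNING/DONE are assumed values.
    import enum

    from sqlalchemy import Column, Enum, Integer, JSON
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()


    class JobStatus(enum.Enum):
        TODO = "todo"        # picked up by the /jobs/stream poller
        RUNNING = "running"  # assumed intermediate state
        DONE = "done"        # assumed terminal state


    class Jobs(Base):
        __tablename__ = "jobs"

        id = Column(Integer, primary_key=True)
        parameters = Column(JSON)  # experiment parameters streamed to clients
        status = Column(Enum(JobStatus), default=JobStatus.TODO)

The new `processing` router: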
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from starlette.responses import StreamingResponse
import asyncio
import json

from app.models import JobStatus, Jobs as JobModel
from app.dependencies import get_db

router = APIRouter()


async def job_event_generator(db: Session):
    """Poll for pending jobs and emit them as server-sent events."""
    while True:
        jobs = db.query(JobModel).filter(JobModel.status == JobStatus.TODO).all()

        if jobs:
            # SSE framing: each message must start with "data:" and end with
            # a blank line, or EventSource clients will not parse it.
            yield f"data: {json.dumps([job.parameters for job in jobs])}\n\n"
        # db.query() runs synchronously on the event loop; acceptable for a
        # 5-second poll, but a threadpool would avoid blocking under load.
        await asyncio.sleep(5)


@router.get("/jobs/stream")
async def stream_jobs(db: Session = Depends(get_db)):
    return StreamingResponse(job_event_generator(db), media_type="text/event-stream")
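
To exercise the endpoint end to end, a small client can subscribe to the stream. A sketch using httpx, assuming the app is served at localhost:8000 with the router mounted at the root:

    # Hypothetical client: host, port, and path prefix depend on the deployment.
    import httpx

    with httpx.stream("GET", "http://localhost:8000/jobs/stream", timeout=None) as response:
        for line in response.iter_lines():
            # SSE messages arrive as "data: <json>" followed by a blank line.
            if line.startswith("data: "):
                print("pending jobs:", line[len("data: "):])

Polling the table every five seconds keeps the router simple at the cost of up-to-five-second latency; a push-based design (e.g. notifying the generator when the `sample` router inserts a job) would trade that simplicity for immediacy.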