Add job processing system with streaming endpoint
Introduced a `processing` router to handle job streaming using server-sent events. Added `Jobs` and `JobStatus` models for managing job-related data, along with database creation logic. Updated the `sample` router to create new job entries during experiment creation.
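The `Jobs` and `JobStatus` models themselves are not part of this diff. As a rough sketch only, they might look like the following in app/models.py, assuming SQLAlchemy's declarative base, a JSON `parameters` column, and an enum-backed `status` column (all names other than `Jobs`, `JobStatus`, `status`, `parameters`, and `JobStatus.TODO` are guesses, as are the extra enum members):

import enum
from sqlalchemy import Column, Integer, Enum, JSON
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class JobStatus(enum.Enum):
    # Hypothetical lifecycle states; only TODO is referenced in this commit.
    TODO = "todo"
    IN_PROGRESS = "in_progress"
    DONE = "done"

class Jobs(Base):
    __tablename__ = "jobs"

    id = Column(Integer, primary_key=True)
    # The streaming endpoint below reads job.status and job.parameters.
    status = Column(Enum(JobStatus), default=JobStatus.TODO, nullable=False)
    parameters = Column(JSON, nullable=True)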
backend/app/routers/processing.py (new file, 24 additions)
@@ -0,0 +1,24 @@
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from starlette.responses import StreamingResponse
import asyncio
import json

from app.models import JobStatus, Jobs as JobModel
from app.dependencies import get_db

router = APIRouter()


async def job_event_generator(db):
    while True:
        jobs = db.query(JobModel).filter(JobModel.status == JobStatus.TODO).all()

        if jobs:
            yield json.dumps([job.parameters for job in jobs]) + "\n\n"
        await asyncio.sleep(5)


@router.get("/jobs/stream")
async def stream_jobs(db: Session = Depends(get_db)):
    return StreamingResponse(job_event_generator(db), media_type="text/event-stream")
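One caveat worth noting: server-sent events expect each message framed as `data: <payload>` followed by a blank line, while the generator above yields bare JSON. A minimal sketch of the same polling loop with strict SSE framing, kept as a suggestion rather than part of the commit:

async def job_event_generator(db):
    # Same polling loop as the commit, but each message is wrapped in the
    # SSE wire format "data: <payload>\n\n", which EventSource clients require.
    while True:
        jobs = db.query(JobModel).filter(JobModel.status == JobStatus.TODO).all()
        if jobs:
            payload = json.dumps([job.parameters for job in jobs])
            yield f"data: {payload}\n\n"
        await asyncio.sleep(5)

Either variant can be smoke-tested with `curl -N http://localhost:8000/jobs/stream` (host, port, and route prefix assumed), where `-N` disables curl's output buffering so events appear as they are emitted.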