Add job processing system with streaming endpoint

Introduced a `processing` router to handle job streaming using server-sent events. Added `Jobs` and `JobStatus` models for managing job-related data, along with database creation logic. Updated the `sample` router to create new job entries during experiment creation.
Author: GotthardG
Date: 2025-04-10 11:53:36 +02:00
parent f54ffd138a
commit fda9142155
5 changed files with 63 additions and 2 deletions
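The `processing` router mentioned in the commit message is not part of the excerpt shown below. As a rough illustration only, a minimal sketch of a server-sent-events streaming endpoint for these jobs could look like the following, assuming the project uses FastAPI; the module paths (`app.models`, `app.dependencies`), the `get_db` session dependency, the route paths, and the polling interval are all assumptions, not taken from the actual commit.

```python
# Hypothetical sketch of a job-streaming endpoint using server-sent events.
# It polls the jobs table and pushes TODO jobs to the client as SSE messages.
import asyncio
import json

from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session

from app.models import Jobs, JobStatus   # assumed import path
from app.dependencies import get_db      # assumed session dependency

router = APIRouter(prefix="/processing", tags=["processing"])


async def job_event_stream(db: Session):
    """Yield one SSE message per job that is still in the TODO state."""
    while True:
        jobs = db.query(Jobs).filter(Jobs.status == JobStatus.TODO).all()
        for job in jobs:
            payload = {"id": job.id, "parameters": job.parameters}
            # SSE framing: each event is "data: <json>" followed by a blank line.
            yield f"data: {json.dumps(payload)}\n\n"
        await asyncio.sleep(5)  # polling interval is an assumption


@router.get("/jobs/stream")
async def stream_jobs(db: Session = Depends(get_db)):
    # text/event-stream tells the client to treat this as an SSE stream.
    return StreamingResponse(job_event_stream(db), media_type="text/event-stream")
```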


@@ -7,10 +7,13 @@ from sqlalchemy import (
    JSON,
    DateTime,
    Boolean,
    Enum,
    func,
)
from sqlalchemy.orm import relationship
from .database import Base
from datetime import datetime
import enum


class Shipment(Base):
@@ -303,3 +306,20 @@ class Results(Base):
    # total_refl: int
    # unique_refl: int
    # #comments: Optional[constr(max_length=200)] = None


class JobStatus(str, enum.Enum):
    TODO = "todo"
    SUBMITTED = "submitted"
    DONE = "done"


class Jobs(Base):
    __tablename__ = "jobs"

    id = Column(Integer, primary_key=True, index=True)
    experiment_parameters_id = Column(Integer, nullable=False)
    status = Column(Enum(JobStatus), default=JobStatus.TODO, nullable=False)
    parameters = Column(JSON, nullable=False)
    created_at = Column(DateTime, server_default=func.now())
    updated_at = Column(DateTime, onupdate=func.now())
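The commit message also states that the `sample` router now creates job entries during experiment creation. That change is not shown in this excerpt; the sketch below is only a hedged illustration of how the `Jobs` model above might be used for that, assuming FastAPI, with the endpoint path, request shape, `get_db` dependency, and import paths as hypothetical placeholders.

```python
# Hypothetical sketch: enqueue a job when an experiment is created.
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from app.models import Jobs, JobStatus   # assumed import path
from app.dependencies import get_db      # assumed session dependency

router = APIRouter(prefix="/samples", tags=["samples"])


@router.post("/{sample_id}/experiments")
def create_experiment(sample_id: int, parameters: dict, db: Session = Depends(get_db)):
    # ... create the experiment-parameters row here (omitted in this sketch) ...
    experiment_parameters_id = 1  # placeholder for the newly created row's id

    # Queue a processing job for the new experiment; its status starts as TODO.
    job = Jobs(
        experiment_parameters_id=experiment_parameters_id,
        parameters=parameters,
        status=JobStatus.TODO,
    )
    db.add(job)
    db.commit()
    db.refresh(job)
    return {"job_id": job.id, "status": job.status}
```

A job created this way would then be picked up by the streaming endpoint, since it matches the `JobStatus.TODO` filter used there.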