Compare commits: 866139baea ... 707c98c5ce
19 Commits

707c98c5ce
6a0953c913
0fa038be94
e341459590
9e5734f060
26f8870d04
a169a39edd
4328b84795
102a11eed7
db6474c86a
b13a3e23f4
a1b857b78a
9e875c5a04
b3847a0bf0
38a5c85b37
58dcaf892f
57de665c7b
9af2e84f9e
3eb4050d82
@@ -395,10 +395,11 @@ beamtimes = [
     Beamtime(
         id=1,
         pgroups="p20001",
+        shift="morning",
         beamtime_name="p20001-test",
         beamline="X06DA",
-        start_date=datetime.strptime("06.02.2025", "%d.%m.%Y").date(),
-        end_date=datetime.strptime("07.02.2025", "%d.%m.%Y").date(),
+        start_date=datetime.strptime("06.05.2025", "%d.%m.%Y").date(),
+        end_date=datetime.strptime("06.05.2025", "%d.%m.%Y").date(),
         status="confirmed",
         comments="this is a test beamtime",
         proposal_id=1,
@@ -406,16 +407,43 @@ beamtimes = [
     ),
     Beamtime(
         id=2,
-        pgroups="p20002",
+        pgroups="p20001",
+        shift="afternoon",
         beamtime_name="p20001-test",
         beamline="X06DA",
-        start_date=datetime.strptime("07.02.2025", "%d.%m.%Y").date(),
-        end_date=datetime.strptime("08.02.2025", "%d.%m.%Y").date(),
+        start_date=datetime.strptime("06.05.2025", "%d.%m.%Y").date(),
+        end_date=datetime.strptime("07.05.2025", "%d.%m.%Y").date(),
         status="confirmed",
         comments="this is a test beamtime",
         proposal_id=2,
         local_contact_id=2,
     ),
+    Beamtime(
+        id=3,
+        pgroups="p20003",
+        shift="morning",
+        beamtime_name="p20003-test",
+        beamline="X06SA",
+        start_date=datetime.strptime("06.05.2025", "%d.%m.%Y").date(),
+        end_date=datetime.strptime("06.05.2025", "%d.%m.%Y").date(),
+        status="confirmed",
+        comments="this is a test beamtime",
+        proposal_id=1,
+        local_contact_id=1,
+    ),
+    Beamtime(
+        id=4,
+        pgroups="p20002",
+        shift="night",
+        beamtime_name="p20002-test",
+        beamline="X06DA",
+        start_date=datetime.strptime("08.05.2025", "%d.%m.%Y").date(),
+        end_date=datetime.strptime("08.05.2025", "%d.%m.%Y").date(),
+        status="confirmed",
+        comments="this is a test beamtime",
+        proposal_id=3,
+        local_contact_id=2,
+    ),
 ]

 # Define shipments
@@ -675,8 +703,31 @@ pucks = [
 # Define samples
 samples = []
 sample_id_counter = 1
+# Assign a beamtime to each dewar
+dewar_to_beamtime = {
+    dewar.id: random.choice([1, 2, 3, 4])
+    for dewar in dewars  # Or use actual beamtime ids
+}
+
+for dewar in dewars:
+    assigned_beamtime_obj = next(
+        b for b in beamtimes if b.id == dewar_to_beamtime[dewar.id]
+    )
+    dewar.beamtimes = [assigned_beamtime_obj]

 for puck in pucks:
+    assigned_beamtime_obj = next(
+        b for b in beamtimes if b.id == dewar_to_beamtime[puck.dewar_id]
+    )
+    puck.beamtimes = [assigned_beamtime_obj]
+
+
+for puck in pucks:
+    dewar_id = puck.dewar_id  # Assuming puck has dewar_id
+    assigned_beamtime = dewar_to_beamtime[dewar_id]  # this is the id (int)
+    # Fix here: use assigned_beamtime (which is the id)
+    assigned_beamtime_obj = next(b for b in beamtimes if b.id == assigned_beamtime)
+
     positions_with_samples = random.randint(1, 16)
     occupied_positions = random.sample(range(1, 17), positions_with_samples)

@@ -688,9 +739,13 @@ for puck in pucks:
             position=pos,
             puck_id=puck.id,
         )
+        sample.beamtimes.append(
+            assigned_beamtime_obj
+        )  # assigned_beamtime_obj is a Beamtime instance
         samples.append(sample)
         sample_id_counter += 1


 # Define possible event types for samples
 event_types = ["Mounting", "Failed", "Unmounting", "Lost"]

@@ -7,8 +7,9 @@ from sqlalchemy import (
     JSON,
     DateTime,
     Boolean,
-    Enum,
     func,
+    Enum,
+    Table,
 )
 from sqlalchemy.orm import relationship
 from .database import Base
@@ -16,6 +17,26 @@ from datetime import datetime
 import enum


+dewar_beamtime_association = Table(
+    "dewar_beamtime_association",
+    Base.metadata,
+    Column("dewar_id", Integer, ForeignKey("dewars.id")),
+    Column("beamtime_id", Integer, ForeignKey("beamtimes.id")),
+)
+puck_beamtime_association = Table(
+    "puck_beamtime_association",
+    Base.metadata,
+    Column("puck_id", Integer, ForeignKey("pucks.id")),
+    Column("beamtime_id", Integer, ForeignKey("beamtimes.id")),
+)
+sample_beamtime_association = Table(
+    "sample_beamtime_association",
+    Base.metadata,
+    Column("sample_id", Integer, ForeignKey("samples.id")),
+    Column("beamtime_id", Integer, ForeignKey("beamtimes.id")),
+)
+
+
 class Shipment(Base):
     __tablename__ = "shipments"

@@ -96,6 +117,7 @@ class Dewar(Base):
     id = Column(Integer, primary_key=True, index=True, autoincrement=True)
     pgroups = Column(String(255), nullable=False)
     dewar_name = Column(String(255), nullable=False)
+    created_at = Column(DateTime, default=datetime.now, nullable=False)
     dewar_type_id = Column(Integer, ForeignKey("dewar_types.id"), nullable=True)
     dewar_serial_number_id = Column(
         Integer, ForeignKey("dewar_serial_numbers.id"), nullable=True
@@ -119,8 +141,9 @@ class Dewar(Base):
     beamline_location = None
     local_contact_id = Column(Integer, ForeignKey("local_contacts.id"), nullable=True)
     local_contact = relationship("LocalContact")
-    beamtime = relationship("Beamtime", back_populates="dewars")
-    beamtime_id = Column(Integer, ForeignKey("beamtimes.id"), nullable=True)
+    beamtimes = relationship(
+        "Beamtime", secondary=dewar_beamtime_association, back_populates="dewars"
+    )

     @property
     def number_of_pucks(self) -> int:
@@ -154,6 +177,9 @@ class Puck(Base):
     dewar = relationship("Dewar", back_populates="pucks")
     samples = relationship("Sample", back_populates="puck")
     events = relationship("PuckEvent", back_populates="puck")
+    beamtimes = relationship(
+        "Beamtime", secondary=puck_beamtime_association, back_populates="pucks"
+    )


 class Sample(Base):
@@ -173,6 +199,9 @@ class Sample(Base):
     puck = relationship("Puck", back_populates="samples")
     events = relationship("SampleEvent", back_populates="sample", lazy="joined")
     images = relationship("Image", back_populates="sample", lazy="joined")
+    beamtimes = relationship(
+        "Beamtime", secondary=sample_beamtime_association, back_populates="samples"
+    )

     @property
     def mount_count(self) -> int:
@@ -236,11 +265,15 @@ class PuckEvent(Base):
     puck = relationship("Puck", back_populates="events")


+SHIFT_CHOICES = ("morning", "afternoon", "night")
+
+
 class Beamtime(Base):
     __tablename__ = "beamtimes"

     id = Column(Integer, primary_key=True, index=True, autoincrement=True)
     pgroups = Column(String(255), nullable=False)
+    shift = Column(Enum(*SHIFT_CHOICES, name="shift_enum"), nullable=False, index=True)
     beamtime_name = Column(String(255), index=True)
     beamline = Column(String(255), nullable=True)
     start_date = Column(Date, nullable=True)
@@ -251,7 +284,15 @@ class Beamtime(Base):
     local_contact_id = Column(Integer, ForeignKey("local_contacts.id"), nullable=False)

     local_contact = relationship("LocalContact")
-    dewars = relationship("Dewar", back_populates="beamtime")
+    dewars = relationship(
+        "Dewar", secondary=dewar_beamtime_association, back_populates="beamtimes"
+    )
+    pucks = relationship(
+        "Puck", secondary=puck_beamtime_association, back_populates="beamtimes"
+    )
+    samples = relationship(
+        "Sample", secondary=sample_beamtime_association, back_populates="beamtimes"
+    )


 class Image(Base):
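With the association tables and the secondary= relationships above, beamtime assignment becomes many-to-many on Dewar, Puck and Sample. A minimal sketch of how these relationships behave from a session (the SessionLocal import path and the ids used here are illustrative assumptions, not part of this diff):

    from app.models import Beamtime, Dewar
    from app.database import SessionLocal  # assumed session factory

    db = SessionLocal()
    beamtime = db.query(Beamtime).filter_by(id=1).first()
    dewar = db.query(Dewar).filter_by(id=1).first()

    # Assigning writes a row into dewar_beamtime_association ...
    dewar.beamtimes = [beamtime]
    db.commit()

    # ... and the reverse side is filled in via back_populates
    print([d.id for d in beamtime.dewars])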
@@ -273,7 +314,9 @@ class ExperimentParameters(Base):
     __tablename__ = "experiment_parameters"
     id = Column(Integer, primary_key=True, index=True, autoincrement=True)
     run_number = Column(Integer, nullable=False)
+    type = Column(String(255), nullable=False)
     beamline_parameters = Column(JSON, nullable=True)
+    dataset = Column(JSON, nullable=True)
     sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)

@@ -281,6 +324,7 @@ class Results(Base):
     __tablename__ = "results"

     id = Column(Integer, primary_key=True, index=True, autoincrement=True)
+    status = Column(String(255), nullable=False)
     result = Column(JSON, nullable=False)  # store the full result object as JSON
     sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
     run_id = Column(Integer, ForeignKey("experiment_parameters.id"), nullable=False)
@@ -309,17 +353,22 @@ class Results(Base):


 class JobStatus(str, enum.Enum):
-    TODO = "todo"
+    TO_DO = "to_do"
     SUBMITTED = "submitted"
     DONE = "done"
+    TO_CANCEL = "to_cancel"
+    CANCELLED = "cancelled"
+    FAILED = "failed"


 class Jobs(Base):
     __tablename__ = "jobs"

     id = Column(Integer, primary_key=True, index=True)
-    experiment_parameters_id = Column(Integer, nullable=False)
-    status = Column(Enum(JobStatus), default=JobStatus.TODO, nullable=False)
-    parameters = Column(JSON, nullable=False)
+    sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
+    run_id = Column(Integer, ForeignKey("experiment_parameters.id"), nullable=False)
+    status = Column(String, nullable=False)
+    experiment_parameters = relationship(ExperimentParameters)
     created_at = Column(DateTime, server_default=func.now())
     updated_at = Column(DateTime, onupdate=func.now())
+    slurm_id = Column(Integer, nullable=True)
backend/app/routers/beamtime.py (new file, 82 lines)

from fastapi import APIRouter, HTTPException, status, Depends
from sqlalchemy.orm import Session, joinedload
from sqlalchemy import or_

from app.models import Beamtime as BeamtimeModel
from app.schemas import (
    Beamtime as BeamtimeSchema,
    BeamtimeCreate,
    loginData,
    BeamtimeResponse,
)
from app.dependencies import get_db
from app.routers.auth import get_current_user

beamtime_router = APIRouter()


@beamtime_router.post("/", response_model=BeamtimeSchema)
async def create_beamtime(
    beamtime: BeamtimeCreate,
    db: Session = Depends(get_db),
    current_user: loginData = Depends(get_current_user),
):
    # Validate the pgroup belongs to the current user
    if beamtime.pgroups not in current_user.pgroups:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="You do not have permission to create a beamtime for this pgroup.",
        )

    # Check for existing beamtime for this pgroup, date, and shift
    existing = (
        db.query(BeamtimeModel)
        .filter(
            BeamtimeModel.pgroups == beamtime.pgroups,
            BeamtimeModel.start_date == beamtime.start_date,
            BeamtimeModel.shift == beamtime.shift,
        )
        .first()
    )
    if existing:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="A beamtime for this pgroup/shift/date already exists.",
        )

    db_beamtime = BeamtimeModel(
        pgroups=beamtime.pgroups,
        shift=beamtime.shift,
        beamtime_name=beamtime.beamtime_name,
        beamline=beamtime.beamline,
        start_date=beamtime.start_date,
        end_date=beamtime.end_date,
        status=beamtime.status,
        comments=beamtime.comments,
        proposal_id=beamtime.proposal_id,
        local_contact_id=beamtime.local_contact_id,
    )

    db.add(db_beamtime)
    db.commit()
    db.refresh(db_beamtime)
    return db_beamtime


@beamtime_router.get(
    "/my-beamtimes",
    response_model=list[BeamtimeResponse],
)
async def get_my_beamtimes(
    db: Session = Depends(get_db),
    current_user: loginData = Depends(get_current_user),
):
    user_pgroups = current_user.pgroups
    filters = [BeamtimeModel.pgroups.like(f"%{pgroup}%") for pgroup in user_pgroups]
    beamtimes = (
        db.query(BeamtimeModel)
        .options(joinedload(BeamtimeModel.local_contact))
        .filter(or_(*filters))
        .all()
    )
    return beamtimes
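For reference, a sketch of how a client could call the new beamtime endpoints once the router is mounted under /beamtimes (see the include_router change further down). The base URL and the bearer-token header shown here are assumptions; only the paths and the BeamtimeCreate fields come from this diff:

    import requests

    BASE = "https://localhost:8000"  # assumed deployment URL
    headers = {"Authorization": "Bearer <token>"}  # auth handling assumed

    payload = {
        "pgroups": "p20001",
        "shift": "morning",
        "beamtime_name": "p20001-test",
        "beamline": "X06DA",
        "start_date": "2025-05-06",
        "end_date": "2025-05-06",
        "status": "confirmed",
        "comments": "created via API",
        "proposal_id": 1,
        "local_contact_id": 1,
    }
    r = requests.post(f"{BASE}/beamtimes/", json=payload, headers=headers, verify=False)
    r.raise_for_status()

    mine = requests.get(f"{BASE}/beamtimes/my-beamtimes", headers=headers, verify=False)
    print(mine.json())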
@@ -1,6 +1,7 @@
 import os
 import tempfile
 import time
+from datetime import datetime, timedelta
 import random
 import hashlib
 from fastapi import APIRouter, HTTPException, status, Depends, Response
@@ -21,7 +22,10 @@ from app.schemas import (
     Sample,
     Puck,
     SampleEventResponse,
-    DewarSchema,  # Clearer name for schema
+    DewarSchema,
+    loginData,
+    DewarWithPucksResponse,
+    PuckResponse,
 )
 from app.models import (
     Dewar as DewarModel,
@@ -32,6 +36,7 @@ from app.models import (
     LogisticsEvent,
     PuckEvent,
     SampleEvent,
+    Beamtime as BeamtimeModel,
 )
 from app.dependencies import get_db
 import qrcode
@@ -44,7 +49,10 @@ from reportlab.pdfgen import canvas
 from app.crud import (
     get_shipment_by_id,
 )
+from app.routers.auth import get_current_user

+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)

 dewar_router = APIRouter()

@@ -543,6 +551,99 @@ def get_all_serial_numbers(db: Session = Depends(get_db)):
         raise HTTPException(status_code=500, detail="Internal server error")


+@dewar_router.get(
+    "/recent-dewars-with-pucks",
+    response_model=List[DewarWithPucksResponse],
+    operation_id="getRecentDewarsWithPucks",
+)
+async def get_recent_dewars_with_pucks(
+    db: Session = Depends(get_db), current_user: loginData = Depends(get_current_user)
+):
+    # Get the timestamp for two months ago
+    two_months_ago = datetime.now() - timedelta(days=60)
+
+    # Query dewars for this user created in the last 2 months
+    dewars = (
+        db.query(DewarModel)
+        .options(joinedload(DewarModel.pucks))  # Eager load pucks
+        .filter(
+            DewarModel.pgroups.in_(current_user.pgroups),
+            DewarModel.created_at >= two_months_ago,
+        )
+        .all()
+    )
+
+    result = []
+    for dewar in dewars:
+        pucks = db.query(PuckModel).filter(PuckModel.dewar_id == dewar.id).all()
+        result.append(
+            DewarWithPucksResponse(
+                id=dewar.id,
+                dewar_name=dewar.dewar_name,
+                created_at=dewar.created_at,
+                pucks=[
+                    PuckResponse(id=puck.id, puck_name=puck.puck_name) for puck in pucks
+                ],
+            )
+        )
+    return result
+
+
+@dewar_router.patch(
+    "/dewar/{dewar_id}/assign-beamtime", operation_id="assignDewarToBeamtime"
+)
+async def assign_beamtime_to_dewar(
+    dewar_id: int,
+    beamtime_id: int,  # Use Query if you want this from ?beamtime_id=...
+    db: Session = Depends(get_db),
+):
+    dewar = db.query(DewarModel).filter(DewarModel.id == dewar_id).first()
+    if not dewar:
+        raise HTTPException(status_code=404, detail="Dewar not found")
+
+    # Check if any sample (in any puck on this dewar) has sample events
+    for puck in dewar.pucks:
+        for sample in puck.samples:
+            sample_event_exists = (
+                db.query(SampleEvent).filter(SampleEvent.sample_id == sample.id).first()
+            )
+            if sample_event_exists:
+                raise HTTPException(
+                    status_code=400,
+                    detail="Cannot change beamtime:"
+                    "at least one sample has events recorded.",
+                )
+
+    # Find the Beamtime instance, if not unassigning
+    beamtime = (
+        db.query(BeamtimeModel).filter(BeamtimeModel.id == beamtime_id).first()
+        if beamtime_id
+        else None
+    )
+
+    if beamtime_id == 0:
+        dewar.beamtimes = []
+    else:
+        dewar.beamtimes = [beamtime]
+
+    db.commit()
+    db.refresh(dewar)
+    for puck in dewar.pucks:
+        if beamtime_id == 0:
+            puck.beamtimes = []
+        else:
+            puck.beamtimes = [beamtime]
+        for sample in puck.samples:
+            # Can assume all have no events because of previous check
+            if beamtime_id == 0:
+                sample.beamtimes = []
+            else:
+                sample.beamtimes = [beamtime]
+
+    db.commit()
+    return {"status": "success", "dewar_id": dewar.id, "beamtime_id": beamtime_id}
+
+
 @dewar_router.get("/{dewar_id}", response_model=Dewar)
 async def get_dewar(dewar_id: int, db: Session = Depends(get_db)):
     dewar = (
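A quick sketch of exercising the new assignment endpoint above: beamtime_id is a plain query parameter here, and passing 0 clears the assignment on the dewar and everything it contains. The host, dewar id and beamtime id below are illustrative assumptions; the /dewars prefix comes from the router registration shown later in this diff:

    import requests

    r = requests.patch(
        "https://localhost:8000/dewars/dewar/5/assign-beamtime",
        params={"beamtime_id": 2},
        headers={"Authorization": "Bearer <token>"},  # auth handling assumed
        verify=False,
    )
    print(r.json())  # e.g. {"status": "success", "dewar_id": 5, "beamtime_id": 2}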
@@ -646,3 +747,25 @@ async def get_single_shipment(id: int, db: Session = Depends(get_db)):
     except SQLAlchemyError as e:
         logging.error(f"Database error occurred: {e}")
         raise HTTPException(status_code=500, detail="Internal server error")
+
+
+@dewar_router.get(
+    "/by-beamtime/{beamtime_id}",
+    response_model=List[DewarSchema],
+    operation_id="get_dewars_by_beamtime",
+)
+async def get_dewars_by_beamtime(beamtime_id: int, db: Session = Depends(get_db)):
+    logger.info(f"get_dewars_by_beamtime called with beamtime_id={beamtime_id}")
+    beamtime = (
+        db.query(BeamtimeModel)
+        .options(joinedload(BeamtimeModel.dewars))
+        .filter(BeamtimeModel.id == beamtime_id)
+        .first()
+    )
+    if not beamtime:
+        logger.warning(f"Beamtime {beamtime_id} not found")
+        raise HTTPException(status_code=404, detail="Beamtime not found")
+    logger.info(
+        f"Returning {len(beamtime.dewars)} dewars: {[d.id for d in beamtime.dewars]}"
+    )
+    return beamtime.dewars
@@ -1,27 +1,123 @@
 import json
 import asyncio
 from fastapi import APIRouter, Depends
+from fastapi.encoders import jsonable_encoder
 from sqlalchemy.orm import Session
 from starlette.responses import StreamingResponse
-from app.models import JobStatus, Jobs as JobModel
+from app.models import (
+    Jobs as JobModel,
+    ExperimentParameters as ExperimentParametersModel,
+    Sample as SampleModel,
+)
+from app.schemas import JobsResponse, JobsUpdate
 from app.dependencies import get_db

 router = APIRouter()


-async def job_event_generator(db: Session):
+async def job_event_generator(get_db):
     while True:
-        # Fetch jobs with status TODO
-        jobs = db.query(JobModel).filter(JobModel.status == JobStatus.TODO).all()
+        # Open a new session for this iteration and close it at the end
+        with next(get_db()) as db:
+            jobs = db.query(JobModel).all()
+            job_items = []
+            for job in jobs:
+                sample = db.query(SampleModel).filter_by(id=job.sample_id).first()
+                experiment = (
+                    db.query(ExperimentParametersModel)
+                    .filter(
+                        ExperimentParametersModel.sample_id == sample.id,
+                        ExperimentParametersModel.id == job.run_id,
+                    )
+                    .first()
+                )

-        if jobs:
-            # It's recommended to explicitly communicate IDs clearly
-            job_payload = [{"id": job.id, "parameters": job.parameters} for job in jobs]
-            yield f"data: {json.dumps(job_payload)}\n\n"
+                job_item = JobsResponse(
+                    job_id=job.id,
+                    sample_id=sample.id,
+                    run_id=job.run_id,
+                    sample_name=sample.sample_name,
+                    status=job.status,
+                    type=experiment.type if experiment else None,
+                    created_at=job.created_at,
+                    updated_at=job.updated_at,
+                    data_collection_parameters=sample.data_collection_parameters,
+                    experiment_parameters=experiment.beamline_parameters
+                    if experiment
+                    else None,
+                    filepath=experiment.dataset.get("filepath")
+                    if experiment and experiment.dataset
+                    else None,
+                    slurm_id=job.slurm_id,
+                )
+                job_items.append(job_item)

-        await asyncio.sleep(5)  # A reasonable heartbeat/refresh
+            if job_items:
+                serialized = jsonable_encoder(job_items)
+                yield f"data: {json.dumps(serialized)}\n\n"
+
+            await asyncio.sleep(5)


+# A reasonable heartbeat/refresh
+
+
 @router.get("/jobs/stream")
-async def stream_jobs(db: Session = Depends(get_db)):
-    return StreamingResponse(job_event_generator(db), media_type="text/event-stream")
+async def stream_jobs():
+    # Pass the dependency itself, not an active session
+    from app.dependencies import get_db
+
+    return StreamingResponse(
+        job_event_generator(get_db), media_type="text/event-stream"
+    )
+
+
+@router.post(
+    "/jobs/update_status", response_model=JobsUpdate, operation_id="update_status"
+)
+def update_jobs_status(payload: JobsUpdate, db: Session = Depends(get_db)):
+    # Fetch the job by job_id
+    job = db.query(JobModel).filter(JobModel.id == payload.job_id).first()
+    if not job:
+        # Optionally, use HTTPException for proper status code
+        from fastapi import HTTPException
+
+        raise HTTPException(status_code=404, detail="Job not found")
+
+    # If status is being updated to "cancelled"
+    if payload.status == "cancelled":
+        job.slurm_id = None
+
+    # Update the status
+    job.status = payload.status
+    job.slurm_id = payload.slurm_id
+    # Optionally update 'updated_at'
+    from datetime import datetime
+
+    job.updated_at = datetime.now()
+
+    db.commit()
+    db.refresh(job)
+
+    # Return the updated job's info as response
+    return JobsUpdate(job_id=job.id, status=job.status, slurm_id=job.slurm_id)
+
+
+def cleanup_cancelled_jobs(db: Session):
+    from datetime import datetime
+    from datetime import timedelta
+
+    """Delete jobs in 'cancelled' state for more than 2 hours."""
+    cutoff = datetime.now() - timedelta(hours=2)
+    print(
+        f"Cleaning up cancelled jobs older than {cutoff} "
+        f"(current time: {datetime.now()})"
+    )
+    old_jobs = (
+        db.query(JobModel)
+        .filter(JobModel.status == "cancelled", JobModel.updated_at < cutoff)
+        .all()
+    )
+    for job in old_jobs:
+        db.delete(job)
+    db.commit()
@@ -2,6 +2,7 @@ from fastapi import APIRouter, Depends

 from app.routers.auth import get_current_user
 from app.routers.address import address_router
+from app.routers.beamtime import beamtime_router
 from app.routers.contact import contact_router
 from app.routers.shipment import shipment_router
 from app.routers.dewar import dewar_router
@@ -20,3 +21,6 @@ protected_router.include_router(
     shipment_router, prefix="/shipments", tags=["shipments"]
 )
 protected_router.include_router(dewar_router, prefix="/dewars", tags=["dewars"])
+protected_router.include_router(
+    beamtime_router, prefix="/beamtimes", tags=["beamtimes"]
+)
@@ -1,6 +1,6 @@
 from datetime import datetime
 from fastapi import APIRouter, HTTPException, status, Depends
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, joinedload
 from sqlalchemy.sql import func
 from typing import List
 import uuid
@@ -20,6 +20,8 @@ from app.models import (
     Sample as SampleModel,
     LogisticsEvent as LogisticsEventModel,
     Dewar as DewarModel,
+    SampleEvent,
+    Beamtime as BeamtimeModel,
 )
 from app.dependencies import get_db
 import logging
@@ -658,3 +660,69 @@ async def get_pucks_by_slot(slot_identifier: str, db: Session = Depends(get_db))
     )

     return pucks
+
+
+@router.patch("/puck/{puck_id}/assign-beamtime", operation_id="assignPuckToBeamtime")
+async def assign_beamtime_to_puck(
+    puck_id: int,
+    beamtime_id: int,
+    db: Session = Depends(get_db),
+):
+    puck = db.query(PuckModel).filter(PuckModel.id == puck_id).first()
+    if not puck:
+        raise HTTPException(status_code=404, detail="Puck not found")
+
+    # Check if any sample in this puck has sample events
+    for sample in puck.samples:
+        sample_event_exists = (
+            db.query(SampleEvent).filter(SampleEvent.sample_id == sample.id).first()
+        )
+        if sample_event_exists:
+            raise HTTPException(
+                status_code=400,
+                detail="Cannot change beamtime:"
+                "at least one sample has events recorded.",
+            )
+
+    beamtime = (
+        db.query(BeamtimeModel).filter(BeamtimeModel.id == beamtime_id).first()
+        if beamtime_id
+        else None
+    )
+
+    if beamtime_id == 0:
+        puck.beamtimes = []
+    else:
+        puck.beamtimes = [beamtime]
+
+    db.commit()
+    db.refresh(puck)
+    for sample in puck.samples:
+        if beamtime_id == 0:
+            sample.beamtimes = []
+        else:
+            sample.beamtimes = [beamtime]
+    db.commit()
+    return {"status": "success", "puck_id": puck.id, "beamtime_id": beamtime_id}
+
+
+@router.get(
+    "/by-beamtime/{beamtime_id}",
+    response_model=List[PuckSchema],
+    operation_id="get_pucks_by_beamtime",
+)
+async def get_pucks_by_beamtime(beamtime_id: int, db: Session = Depends(get_db)):
+    logger.info(f"get_pucks_by_beamtime called with beamtime_id={beamtime_id}")
+    beamtime = (
+        db.query(BeamtimeModel)
+        .options(joinedload(BeamtimeModel.pucks))  # eager load pucks
+        .filter(BeamtimeModel.id == beamtime_id)
+        .first()
+    )
+    if not beamtime:
+        logger.warning(f"Beamtime {beamtime_id} not found")
+        raise HTTPException(status_code=404, detail="Beamtime not found")
+    logger.info(
+        f"Returning {len(beamtime.pucks)} pucks: {[p.id for p in beamtime.pucks]}"
+    )
+    return beamtime.pucks
@@ -1,4 +1,5 @@
 from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Form
+from fastapi.encoders import jsonable_encoder
 from sqlalchemy.orm import Session
 from pathlib import Path
 from typing import List
@@ -18,6 +19,7 @@ from app.schemas import (
     ResultResponse,
     ResultCreate,
     Results as ProcessingResults,
+    Datasets,
 )
 from app.models import (
     Puck as PuckModel,
@@ -30,6 +32,7 @@ from app.models import (
     Results as ResultsModel,
     Jobs as JobModel,
     JobStatus,
+    Beamtime as BeamtimeModel,
 )
 from app.dependencies import get_db
 import logging
@@ -295,6 +298,7 @@ async def get_sample_results(active_pgroup: str, db: Session = Depends(get_db)):
             experiment_runs=[
                 ExperimentParametersRead(
                     id=ex.id,
+                    type=ex.type,
                     run_number=ex.run_number,
                     beamline_parameters=ex.beamline_parameters,
                     sample_id=ex.sample_id,
@@ -332,6 +336,7 @@ def create_experiment_parameters_for_sample(
     # stored as JSON.
     new_exp = ExperimentParametersModel(
         run_number=new_run_number,
+        type=exp_params.type,
         beamline_parameters=exp_params.beamline_parameters.dict()
         if exp_params.beamline_parameters
         else None,
@@ -350,18 +355,60 @@ def create_experiment_parameters_for_sample(
     db.add(new_event)
     db.commit()

-    new_job = JobModel(
-        experiment_parameters_id=new_exp.id,  # <-- Correct reference here
-        parameters=new_exp.to_dict(),  # assuming params has a to_dict() method
-        status=JobStatus.TODO,
-    )
-    db.add(new_job)
-    db.commit()
-    db.refresh(new_job)
-
     return new_exp


+@router.patch(
+    "/update-dataset/{sample_id}/{run_id}",
+    response_model=ExperimentParametersRead,
+    operation_id="update_dataset_for_experiment_run",
+)
+def update_experiment_run_dataset(
+    sample_id: int,
+    run_id: int,
+    dataset: Datasets,
+    db: Session = Depends(get_db),
+):
+    # Find the run for this sample and run_id
+    exp = (
+        db.query(ExperimentParametersModel)
+        .filter(
+            ExperimentParametersModel.sample_id == sample_id,
+            ExperimentParametersModel.id == run_id,
+        )
+        .first()
+    )
+    if not exp:
+        raise HTTPException(
+            status_code=404,
+            detail="ExperimentParameters (run) not found for this sample",
+        )
+
+    exp.dataset = jsonable_encoder(dataset)
+    db.commit()
+    db.refresh(exp)
+
+    # Only create a job if status is "written" and job does not exist yet
+    if dataset.status == "written":
+        job_exists = (
+            db.query(JobModel)
+            .filter(JobModel.sample_id == sample_id, JobModel.run_id == run_id)
+            .first()
+        )
+        if not job_exists:
+            new_job = JobModel(
+                sample_id=sample_id,
+                run_id=run_id,
+                experiment_parameters=exp,  # adjust this line as appropriate
+                status=JobStatus.TO_DO,
+            )
+            db.add(new_job)
+            db.commit()
+            db.refresh(new_job)
+
+    return exp
+
+
 @router.post(
     "/processing-results", response_model=ResultResponse, operation_id="create_result"
 )
@@ -379,6 +426,7 @@ def create_result(payload: ResultCreate, db: Session = Depends(get_db)):

     result_entry = ResultsModel(
         sample_id=payload.sample_id,
+        status=payload.status,
         run_id=payload.run_id,
         result=payload.result.model_dump(),  # Serialize entire result to JSON
     )
@@ -389,6 +437,7 @@ def create_result(payload: ResultCreate, db: Session = Depends(get_db)):

     return ResultResponse(
         id=result_entry.id,
+        status=result_entry.status,
         sample_id=result_entry.sample_id,
         run_id=result_entry.run_id,
         result=payload.result,  # return original payload directly
@@ -415,6 +464,7 @@ async def get_results_for_run_and_sample(
     formatted_results = [
         ResultResponse(
             id=result.id,
+            status=result.status,
             sample_id=result.sample_id,
             run_id=result.run_id,
             result=ProcessingResults(**result.result),
@@ -423,3 +473,20 @@ async def get_results_for_run_and_sample(
     ]

     return formatted_results
+
+
+@router.get(
+    "/by-beamtime/{beamtime_id}",
+    response_model=List[SampleSchema],
+    operation_id="get_samples_by_beamtime",
+)
+async def get_samples_by_beamtime(beamtime_id: int, db: Session = Depends(get_db)):
+    beamtime = (
+        db.query(BeamtimeModel)
+        .options(joinedload(BeamtimeModel.samples))
+        .filter(BeamtimeModel.id == beamtime_id)
+        .first()
+    )
+    if not beamtime:
+        raise HTTPException(status_code=404, detail="Beamtime not found")
+    return beamtime.samples
@@ -373,13 +373,13 @@ class Results(BaseModel):
     resolution: float
     unit_cell: str
     spacegroup: str
-    rmerge: float
-    rmeas: float
-    isig: float
+    rmerge: List[CurvePoint]
+    rmeas: List[CurvePoint]
+    isig: List[CurvePoint]
     cc: List[CurvePoint]
     cchalf: List[CurvePoint]
-    completeness: float
-    multiplicity: float
+    completeness: List[CurvePoint]
+    multiplicity: List[CurvePoint]
     nobs: int
     total_refl: int
     unique_refl: int
@@ -478,6 +478,55 @@ class AddressMinimal(BaseModel):
     id: int


+class Beamtime(BaseModel):
+    id: int
+    pgroups: str
+    shift: str
+    beamtime_name: str
+    beamline: str
+    start_date: date
+    end_date: date
+    status: str
+    comments: Optional[constr(max_length=200)] = None
+    proposal_id: Optional[int]
+    local_contact_id: Optional[int]
+    local_contact: Optional[LocalContact]
+
+    class Config:
+        from_attributes = True
+
+
+class BeamtimeCreate(BaseModel):
+    pgroups: str  # this should be changed to pgroup
+    shift: str
+    beamtime_name: str
+    beamline: str
+    start_date: date
+    end_date: date
+    status: str
+    comments: Optional[constr(max_length=200)] = None
+    proposal_id: Optional[int]
+    local_contact_id: Optional[int]
+
+
+class BeamtimeResponse(BaseModel):
+    id: int
+    pgroups: str
+    shift: str
+    beamtime_name: str
+    beamline: str
+    start_date: date
+    end_date: date
+    status: str
+    comments: Optional[str] = None
+    proposal_id: Optional[int]
+    local_contact_id: Optional[int]
+    local_contact: Optional[LocalContact]
+
+    class Config:
+        from_attributes = True
+
+
 class Sample(BaseModel):
     id: int
     sample_name: str
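Because the new response models set from_attributes = True, an ORM Beamtime row can be serialized straight into BeamtimeResponse. A minimal sketch, assuming Pydantic v2 (which the from_attributes naming implies) and an already-open session named db:

    from app.models import Beamtime as BeamtimeModel
    from app.schemas import BeamtimeResponse

    row = db.query(BeamtimeModel).first()
    resp = BeamtimeResponse.model_validate(row)  # v2 replacement for from_orm
    print(resp.model_dump_json())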
@@ -493,6 +542,7 @@ class Sample(BaseModel):
     mount_count: Optional[int] = None
     unmount_count: Optional[int] = None
     # results: Optional[Results] = None
+    beamtimes: List[Beamtime] = []

     class Config:
         from_attributes = True
@@ -507,6 +557,7 @@ class SampleCreate(BaseModel):
     comments: Optional[str] = None
     results: Optional[Results] = None
     events: Optional[List[str]] = None
+    beamtime_ids: List[int] = []

     class Config:
         populate_by_name = True
@@ -534,6 +585,7 @@ class PuckCreate(BaseModel):
     puck_type: str
     puck_location_in_dewar: int
     samples: List[SampleCreate] = []
+    beamtime_ids: List[int] = []


 class PuckUpdate(BaseModel):
@@ -541,6 +593,7 @@ class PuckUpdate(BaseModel):
     puck_type: Optional[str] = None
     puck_location_in_dewar: Optional[int] = None
     dewar_id: Optional[int] = None
+    beamtime_ids: List[int] = []


 class Puck(BaseModel):
@@ -551,6 +604,7 @@ class Puck(BaseModel):
     dewar_id: int
     events: List[PuckEvent] = []
     samples: List[Sample] = []
+    beamtimes: List[Beamtime] = []

     class Config:
         from_attributes = True
@@ -565,10 +619,12 @@ class DewarBase(BaseModel):
     tracking_number: str
     number_of_pucks: Optional[int] = None
     number_of_samples: Optional[int] = None
+    created_at: Optional[datetime] = None
     status: str
     contact_id: Optional[int]
     return_address_id: Optional[int]
     pucks: List[PuckCreate] = []
+    beamtimes: List[Beamtime] = []

     class Config:
         from_attributes = True
@@ -581,6 +637,7 @@ class DewarCreate(DewarBase):
 class Dewar(DewarBase):
     id: int
     pgroups: str
+    created_at: Optional[datetime] = None
     shipment_id: Optional[int]
     contact: Optional[Contact]
     return_address: Optional[Address]
@@ -600,6 +657,7 @@ class DewarUpdate(BaseModel):
     status: Optional[str] = None
     contact_id: Optional[int] = None
     address_id: Optional[int] = None
+    beamtime_ids: List[int] = []


 class DewarSchema(BaseModel):
@@ -769,22 +827,16 @@ class PuckWithTellPosition(BaseModel):
         from_attributes = True


-class Beamtime(BaseModel):
+class PuckResponse(BaseModel):
     id: int
-    pgroups: str
-    beamtime_name: str
-    beamline: str
-    start_date: date
-    end_date: date
-    status: str
-    comments: Optional[constr(max_length=200)] = None
-    proposal_id: Optional[int]
-    proposal: Optional[Proposal]
-    local_contact_id: Optional[int]
-    local_contact: Optional[LocalContact]
+    puck_name: str

-    class Config:
-        from_attributes = True
+
+class DewarWithPucksResponse(BaseModel):
+    id: int
+    dewar_name: str
+    created_at: datetime
+    pucks: List[PuckResponse]


 class ImageCreate(BaseModel):
@@ -898,9 +950,17 @@ class BeamlineParameters(BaseModel):
     # beamstopDiameter_mm: Optional[float] = None


+class Datasets(BaseModel):
+    filepath: str
+    status: str
+    written_at: datetime
+
+
 class ExperimentParametersBase(BaseModel):
     run_number: int
+    type: str
     beamline_parameters: Optional[BeamlineParameters] = None
+    dataset: Optional[Datasets] = None
     sample_id: int

@@ -915,6 +975,12 @@ class ExperimentParametersRead(ExperimentParametersBase):
         from_attributes = True


+class ExperimentParametersUpdate(BaseModel):
+    run_number: int
+    dataset: Optional[Datasets]
+    sample_id: int
+
+
 class SampleResult(BaseModel):
     sample_id: int
     sample_name: str
@@ -926,6 +992,7 @@ class SampleResult(BaseModel):

 class ResultCreate(BaseModel):
     sample_id: int
+    status: str
     run_id: int
     result: Results

@@ -935,9 +1002,46 @@ class ResultCreate(BaseModel):

 class ResultResponse(BaseModel):
     id: int
+    status: str
     sample_id: int
     run_id: int
     result: Results

     class Config:
         from_attributes = True
+
+
+class JobsCreate(BaseModel):
+    id: int
+    sample_id: int
+    run_id: int
+    sample_name: str
+    status: str
+    created_at: datetime
+    updated_at: datetime
+    experiment_parameters: BeamlineParameters
+    slurm_id: Optional[int] = None
+
+    class Config:
+        from_attributes = True
+
+
+class JobsResponse(BaseModel):
+    job_id: int
+    sample_id: int
+    run_id: int
+    sample_name: str
+    status: str
+    type: str
+    created_at: datetime
+    updated_at: Optional[datetime]
+    data_collection_parameters: Optional[DataCollectionParameters] = None
+    experiment_parameters: BeamlineParameters
+    filepath: Optional[str] = None
+    slurm_id: Optional[int] = None
+
+
+class JobsUpdate(BaseModel):
+    job_id: int
+    status: str
+    slurm_id: int
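JobsUpdate is both the request body and the response of the /jobs/update_status endpoint. A minimal round-trip sketch; the URL prefix and verify=False mirror the simulation notebook at the end of this diff, and the ids are illustrative:

    import requests

    payload = {"job_id": 4, "status": "submitted", "slurm_id": 76545678}
    r = requests.post(
        "https://127.0.0.1:8000/processing/jobs/update_status",
        json=payload,
        verify=False,
    )
    print(r.json())  # echoes job_id, status and slurm_id on success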
@@ -122,14 +122,28 @@ if environment == "dev":
         ssl_heidi.generate_self_signed_cert(cert_path, key_path)


+def cleanup_job_loop():
+    import time
+    from app.dependencies import get_db
+    from app.routers.processing import cleanup_cancelled_jobs
+
+    while True:
+        db = next(get_db())
+        try:
+            cleanup_cancelled_jobs(db)
+        finally:
+            db.close()
+        time.sleep(3600)  # every hour
+
+
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     print("[INFO] Running application startup tasks...")
     db = SessionLocal()
     try:
         if environment == "prod":
-            Base.metadata.drop_all(bind=engine)
-            Base.metadata.create_all(bind=engine)
+            # Base.metadata.drop_all(bind=engine)
+            # Base.metadata.create_all(bind=engine)
             from sqlalchemy.engine import reflection

             inspector = reflection.Inspector.from_engine(engine)
@@ -174,6 +188,12 @@ async def lifespan(app: FastAPI):

         load_slots_data(db)

+        from threading import Thread
+
+        # Start cleanup in background thread
+        thread = Thread(target=cleanup_job_loop, daemon=True)
+        thread.start()
+
         yield
     finally:
         db.close()
backend/propipe_sim.ipynb (new file, 130 lines)

{
 "cells": [
  {
   "cell_type": "code",
   "id": "initial_id",
   "metadata": {
    "collapsed": true,
    "ExecuteTime": {
     "end_time": "2025-04-30T09:22:17.261436Z",
     "start_time": "2025-04-30T09:21:47.206494Z"
    }
   },
   "source": [
    "import requests\n",
    "import sseclient\n",
    "import json\n",
    "\n",
    "SSE_URL = \"https://127.0.0.1:8000/processing/jobs/stream\"\n",
    "UPDATE_URL = \"https://127.0.0.1:8000/processing/jobs/update_status\"\n",
    "\n",
    "def submit_job_update(job_id, status, slurm_id):\n",
    "    payload = {\n",
    "        \"job_id\": job_id,\n",
    "        \"status\": status,\n",
    "        \"slurm_id\": slurm_id,\n",
    "    }\n",
    "    try:\n",
    "        response = requests.post(UPDATE_URL, json=payload, verify=False)\n",
    "        if response.status_code == 200:\n",
    "            print(f\"✅ Job {job_id} status updated to '{status}'. Response: {response.json()}\")\n",
    "        else:\n",
    "            print(f\"❌ Failed to update job {job_id}. Status: {response.status_code}. Response: {response.text}\")\n",
    "    except Exception as e:\n",
    "        print(f\"Failed to submit update for Job {job_id}: {e}\")\n",
    "\n",
    "def listen_and_update_jobs(url):\n",
    "    print(\"Starting job status updater...\")\n",
    "    with requests.get(url, stream=True, verify=False) as response:\n",
    "        if response.status_code != 200:\n",
    "            print(f\"Failed to connect with status code: {response.status_code}\")\n",
    "            return\n",
    "\n",
    "        client = sseclient.SSEClient(response)\n",
    "\n",
    "        for event in client.events():\n",
    "            try:\n",
    "                jobs = json.loads(event.data)\n",
    "                print(f\"Jobs received: {jobs}\")\n",
    "\n",
    "                for job in jobs:\n",
    "                    job_id = job.get(\"job_id\")\n",
    "                    print(f\"Job ID: {job_id}, Current status: {job.get('status')}\")\n",
    "                    # Immediately update status to \"submitted\"\n",
    "                    submit_job_update(job_id, \"submitted\", 76545678)\n",
    "            except json.JSONDecodeError as e:\n",
    "                print(f\"Error decoding event data: {e}\")\n",
    "            except Exception as e:\n",
    "                print(f\"Unexpected error while processing event: {e}\")\n",
    "\n",
    "if __name__ == \"__main__\":\n",
    "    listen_and_update_jobs(SSE_URL)\n"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Starting job status updater...\n",
"Jobs received: [{'job_id': 4, 'sample_id': 204, 'run_id': 1, 'sample_name': 'Sample204', 'status': 'todo', 'type': 'standard', 'created_at': '2025-04-30T09:05:14.901478', 'updated_at': None, 'data_collection_parameters': None, 'experiment_parameters': {'synchrotron': 'Swiss Light Source', 'beamline': 'PXIII', 'detector': {'manufacturer': 'DECTRIS', 'model': 'PILATUS4 2M', 'type': 'photon-counting', 'serialNumber': '16684dscsd668468', 'detectorDistance_mm': 95.0, 'beamCenterX_px': 512.0, 'beamCenterY_px': 512.0, 'pixelSizeX_um': 150.0, 'pixelSizeY_um': 150.0}, 'wavelength': 1.0, 'ringCurrent_A': 0.0, 'ringMode': 'Machine Down', 'undulator': None, 'undulatorgap_mm': None, 'monochromator': 'Si111', 'transmission': 1.0, 'focusingOptic': 'Kirkpatrick-Baez', 'beamlineFluxAtSample_ph_s': 0.0, 'beamSizeWidth': 30.0, 'beamSizeHeight': 30.0, 'characterization': None, 'rotation': {'omegaStart_deg': 0.0, 'omegaStep': 0.1, 'chi': 0.0, 'phi': 10.0, 'numberOfImages': 3600, 'exposureTime_s': 0.02}, 'gridScan': None, 'jet': None, 'cryojetTemperature_K': None, 'humidifierTemperature_K': None, 'humidifierHumidity': None}, 'filepath': '/das/work/p11/p11206/raw_data/vincent/20250415_6D_SLS2_1st_data/20250415_fullbeam_dtz220_Lyso102_again_360deg', 'slurm_id': None}]\n",
"Job ID: 4, Current status: todo\n",
"✅ Job 4 status updated to 'submitted'. Response: {'job_id': 4, 'status': 'submitted', 'slurm_id': 76545678}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n"
]
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
"\u001B[0;31mKeyboardInterrupt\u001B[0m Traceback (most recent call last)",
"Cell \u001B[0;32mIn[14], line 48\u001B[0m\n\u001B[1;32m 45\u001B[0m \u001B[38;5;28mprint\u001B[39m(\u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mUnexpected error while processing event: \u001B[39m\u001B[38;5;132;01m{\u001B[39;00me\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m\"\u001B[39m)\n\u001B[1;32m 47\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;18m__name__\u001B[39m \u001B[38;5;241m==\u001B[39m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m__main__\u001B[39m\u001B[38;5;124m\"\u001B[39m:\n\u001B[0;32m---> 48\u001B[0m \u001B[43mlisten_and_update_jobs\u001B[49m\u001B[43m(\u001B[49m\u001B[43mSSE_URL\u001B[49m\u001B[43m)\u001B[49m\n",
"Cell \u001B[0;32mIn[14], line 32\u001B[0m, in \u001B[0;36mlisten_and_update_jobs\u001B[0;34m(url)\u001B[0m\n\u001B[1;32m 28\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m\n\u001B[1;32m 30\u001B[0m client \u001B[38;5;241m=\u001B[39m sseclient\u001B[38;5;241m.\u001B[39mSSEClient(response)\n\u001B[0;32m---> 32\u001B[0m \u001B[43m\u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mevent\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mclient\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mevents\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m:\u001B[49m\n\u001B[1;32m 33\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mtry\u001B[39;49;00m\u001B[43m:\u001B[49m\n\u001B[1;32m 34\u001B[0m \u001B[43m \u001B[49m\u001B[43mjobs\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43m \u001B[49m\u001B[43mjson\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mloads\u001B[49m\u001B[43m(\u001B[49m\u001B[43mevent\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mdata\u001B[49m\u001B[43m)\u001B[49m\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sseclient/__init__.py:55\u001B[0m, in \u001B[0;36mSSEClient.events\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 54\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21mevents\u001B[39m(\u001B[38;5;28mself\u001B[39m):\n\u001B[0;32m---> 55\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mchunk\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_read\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m:\u001B[49m\n\u001B[1;32m 56\u001B[0m \u001B[43m \u001B[49m\u001B[43mevent\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43m \u001B[49m\u001B[43mEvent\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 57\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;66;43;03m# Split before decoding so splitlines() only uses \\r and \\n\u001B[39;49;00m\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/sseclient/__init__.py:45\u001B[0m, in \u001B[0;36mSSEClient._read\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 38\u001B[0m \u001B[38;5;250m\u001B[39m\u001B[38;5;124;03m\"\"\"Read the incoming event source stream and yield event chunks.\u001B[39;00m\n\u001B[1;32m 39\u001B[0m \n\u001B[1;32m 40\u001B[0m \u001B[38;5;124;03mUnfortunately it is possible for some servers to decide to break an\u001B[39;00m\n\u001B[1;32m 41\u001B[0m \u001B[38;5;124;03mevent into multiple HTTP chunks in the response. It is thus necessary\u001B[39;00m\n\u001B[1;32m 42\u001B[0m \u001B[38;5;124;03mto correctly stitch together consecutive response chunks and find the\u001B[39;00m\n\u001B[1;32m 43\u001B[0m \u001B[38;5;124;03mSSE delimiter (empty new line) to yield full, correct event chunks.\"\"\"\u001B[39;00m\n\u001B[1;32m 44\u001B[0m data \u001B[38;5;241m=\u001B[39m \u001B[38;5;124mb\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124m'\u001B[39m\n\u001B[0;32m---> 45\u001B[0m \u001B[43m\u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mchunk\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_event_source\u001B[49m\u001B[43m:\u001B[49m\n\u001B[1;32m 46\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mline\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mchunk\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43msplitlines\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m)\u001B[49m\u001B[43m:\u001B[49m\n\u001B[1;32m 47\u001B[0m \u001B[43m \u001B[49m\u001B[43mdata\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m+\u001B[39;49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43m \u001B[49m\u001B[43mline\u001B[49m\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/requests/models.py:820\u001B[0m, in \u001B[0;36mResponse.iter_content.<locals>.generate\u001B[0;34m()\u001B[0m\n\u001B[1;32m 818\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mhasattr\u001B[39m(\u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mraw, \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mstream\u001B[39m\u001B[38;5;124m\"\u001B[39m):\n\u001B[1;32m 819\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 820\u001B[0m \u001B[38;5;28;01myield from\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mraw\u001B[38;5;241m.\u001B[39mstream(chunk_size, decode_content\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)\n\u001B[1;32m 821\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m ProtocolError \u001B[38;5;28;01mas\u001B[39;00m e:\n\u001B[1;32m 822\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m ChunkedEncodingError(e)\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/response.py:1040\u001B[0m, in \u001B[0;36mHTTPResponse.stream\u001B[0;34m(self, amt, decode_content)\u001B[0m\n\u001B[1;32m 1024\u001B[0m \u001B[38;5;250m\u001B[39m\u001B[38;5;124;03m\"\"\"\u001B[39;00m\n\u001B[1;32m 1025\u001B[0m \u001B[38;5;124;03mA generator wrapper for the read() method. A call will block until\u001B[39;00m\n\u001B[1;32m 1026\u001B[0m \u001B[38;5;124;03m``amt`` bytes have been read from the connection or until the\u001B[39;00m\n\u001B[0;32m (...)\u001B[0m\n\u001B[1;32m 1037\u001B[0m \u001B[38;5;124;03m 'content-encoding' header.\u001B[39;00m\n\u001B[1;32m 1038\u001B[0m \u001B[38;5;124;03m\"\"\"\u001B[39;00m\n\u001B[1;32m 1039\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mchunked \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39msupports_chunked_reads():\n\u001B[0;32m-> 1040\u001B[0m \u001B[38;5;28;01myield from\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mread_chunked(amt, decode_content\u001B[38;5;241m=\u001B[39mdecode_content)\n\u001B[1;32m 1041\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m 1042\u001B[0m \u001B[38;5;28;01mwhile\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m is_fp_closed(\u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_fp) \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mlen\u001B[39m(\u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_decoded_buffer) \u001B[38;5;241m>\u001B[39m \u001B[38;5;241m0\u001B[39m:\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/response.py:1184\u001B[0m, in \u001B[0;36mHTTPResponse.read_chunked\u001B[0;34m(self, amt, decode_content)\u001B[0m\n\u001B[1;32m 1181\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[1;32m 1183\u001B[0m \u001B[38;5;28;01mwhile\u001B[39;00m \u001B[38;5;28;01mTrue\u001B[39;00m:\n\u001B[0;32m-> 1184\u001B[0m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_update_chunk_length\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 1185\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mchunk_left \u001B[38;5;241m==\u001B[39m \u001B[38;5;241m0\u001B[39m:\n\u001B[1;32m 1186\u001B[0m \u001B[38;5;28;01mbreak\u001B[39;00m\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/response.py:1108\u001B[0m, in \u001B[0;36mHTTPResponse._update_chunk_length\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 1106\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mchunk_left \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[1;32m 1107\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[0;32m-> 1108\u001B[0m line \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_fp\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mfp\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mreadline\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m \u001B[38;5;66;03m# type: ignore[union-attr]\u001B[39;00m\n\u001B[1;32m 1109\u001B[0m line \u001B[38;5;241m=\u001B[39m line\u001B[38;5;241m.\u001B[39msplit(\u001B[38;5;124mb\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m;\u001B[39m\u001B[38;5;124m\"\u001B[39m, \u001B[38;5;241m1\u001B[39m)[\u001B[38;5;241m0\u001B[39m]\n\u001B[1;32m 1110\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/socket.py:707\u001B[0m, in \u001B[0;36mSocketIO.readinto\u001B[0;34m(self, b)\u001B[0m\n\u001B[1;32m 705\u001B[0m \u001B[38;5;28;01mwhile\u001B[39;00m \u001B[38;5;28;01mTrue\u001B[39;00m:\n\u001B[1;32m 706\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 707\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_sock\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mrecv_into\u001B[49m\u001B[43m(\u001B[49m\u001B[43mb\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 708\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m timeout:\n\u001B[1;32m 709\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_timeout_occurred \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mTrue\u001B[39;00m\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/ssl.py:1216\u001B[0m, in \u001B[0;36mSSLSocket.recv_into\u001B[0;34m(self, buffer, nbytes, flags)\u001B[0m\n\u001B[1;32m 1212\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m flags \u001B[38;5;241m!=\u001B[39m \u001B[38;5;241m0\u001B[39m:\n\u001B[1;32m 1213\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;167;01mValueError\u001B[39;00m(\n\u001B[1;32m 1214\u001B[0m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mnon-zero flags not allowed in calls to recv_into() on \u001B[39m\u001B[38;5;132;01m%s\u001B[39;00m\u001B[38;5;124m\"\u001B[39m \u001B[38;5;241m%\u001B[39m\n\u001B[1;32m 1215\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m\u001B[38;5;18m__class__\u001B[39m)\n\u001B[0;32m-> 1216\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mread\u001B[49m\u001B[43m(\u001B[49m\u001B[43mnbytes\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mbuffer\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 1217\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m 1218\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28msuper\u001B[39m()\u001B[38;5;241m.\u001B[39mrecv_into(buffer, nbytes, flags)\n",
"File \u001B[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/ssl.py:1072\u001B[0m, in \u001B[0;36mSSLSocket.read\u001B[0;34m(self, len, buffer)\u001B[0m\n\u001B[1;32m 1070\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m 1071\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m buffer \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[0;32m-> 1072\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_sslobj\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mread\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mlen\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mbuffer\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 1073\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m 1074\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_sslobj\u001B[38;5;241m.\u001B[39mread(\u001B[38;5;28mlen\u001B[39m)\n",
"\u001B[0;31mKeyboardInterrupt\u001B[0m: "
]
}
],
"execution_count": 14
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "aareDB"
-version = "0.1.1a1"
+version = "0.1.1a3"
 description = "Backend for next gen sample management system"
 authors = [{name = "Guillaume Gotthard", email = "guillaume.gotthard@psi.ch"}]
 license = {text = "MIT"}
@@ -29,7 +29,8 @@ dependencies = [
     "python-dateutil~=2.8.2",
     "tomli>=2.0.1",
     "python-dotenv",
-    "psycopg2-binary"
+    "psycopg2-binary",
+    "urllib3~=2.2.1"
 ]
 [tool.pytest.ini_options]
 norecursedirs = ["backend/python-client"]
@@ -1,7 +1,7 @@
 {
   "ssl_cert_path": "ssl/cert.pem",
   "ssl_key_path": "ssl/key.pem",
-  "OPENAPI_URL": "https://127.0.0.1:8000/openapi.json",
+  "OPENAPI_URL": "https://0.0.0.0:8000/openapi.json",
   "SCHEMA_PATH": "./src/openapi.json",
   "OUTPUT_DIRECTORY": "./openapi",
   "PORT": 8000,
@@ -13,13 +13,16 @@ services:
       - ./app:/app/app # Map app directory to /app/app
       - ./config_${ENVIRONMENT}.json:/app/backend/config_${ENVIRONMENT}.json # Explicitly map config_dev.json
       - ./backend/ssl:/app/backend/ssl # clearly mount SSL files explicitly into Docker
+      - ./uploads:/app/backend/uploads
+      - ./uploads:/app/backend/images
+
     working_dir: /app/backend # Set working directory to backend/
     command: python main.py # Command to run main.py
     depends_on: # ⬅️ New addition: wait until postgres is started
       - postgres
     healthcheck:
       test: [ "CMD-SHELL", "curl -k -f https://localhost:${PORT}/openapi.json || exit 1" ]
-      interval: 5s
+      interval: 30s
       timeout: 5s
       retries: 5
     environment: # ⬅️ Provide DB info to your backend
@@ -39,7 +42,7 @@ services:
     ports:
       - "5432:5432"
     volumes:
-      - pgdata:/var/lib/postgresql/data
+      - ./db_data:/var/lib/postgresql/data


   frontend:
frontend/package-lock.json (generated, 7793 changed lines): file diff suppressed because it is too large.
@@ -12,6 +12,7 @@ import AddressManager from './pages/AddressManagerView';
 import ContactsManager from './pages/ContactsManagerView';
 import LoginView from './pages/LoginView';
 import ProtectedRoute from './components/ProtectedRoute';
+import BeamtimeOverview from './components/BeamtimeOverview';

 const App: React.FC = () => {
   const [openAddressManager, setOpenAddressManager] = useState(false);
@@ -65,10 +66,11 @@ const App: React.FC = () => {
   }, []);

   const handlePgroupChange = (newPgroup: string) => {
-    setActivePgroup(newPgroup);
+    setActivePgroup(newPgroup); // Updates active pgroup state in App
     console.log(`pgroup changed to: ${newPgroup}`);
   };


   return (
     <Router>
       <ResponsiveAppBar
@@ -82,9 +84,61 @@ const App: React.FC = () => {
       <Routes>
         <Route path="/login" element={<LoginView />} />
         <Route path="/" element={<ProtectedRoute element={<HomePage />} />} />
-        <Route path="/shipments" element={<ProtectedRoute element={<ShipmentView pgroups={pgroups} activePgroup={activePgroup} />} />} />
-        <Route path="/planning" element={<ProtectedRoute element={<PlanningView />} />} />
-        <Route path="/results" element={<ProtectedRoute element={<ResultsView pgroups={pgroups} activePgroup={activePgroup} />} />} />
+        <Route path="/shipments"
+          element={
+            <ProtectedRoute
+              element={
+                <ShipmentView
+                  pgroups={pgroups}
+                  activePgroup={activePgroup}
+                />
+              }
+            />
+          }
+        />
+        <Route path="/planning"
+          element={
+            <ProtectedRoute
+              element={
+                <PlanningView
+                  pgroups={pgroups}
+                  activePgroup={activePgroup}
+                  onPgroupChange={handlePgroupChange}
+                />
+              }
+            />
+          }
+        />
+        <Route
+          path="/results/:beamtimeId"
+          element={
+            <ProtectedRoute
+              element={
+                <ResultsView
+                  onPgroupChange={handlePgroupChange}
+                  currentPgroup={activePgroup}
+                />
+              }
+            />
+          }
+        />
+        <Route
+          path="/beamtime-overview"
+          element={
+            <ProtectedRoute
+              element={
+                <BeamtimeOverview
+                  activePgroup={activePgroup}
+                  onPgroupChange={handlePgroupChange} // Pass this prop correctly
+                />
+              }
+            />
+          }
+        />
+        <Route path="/results" element={<ProtectedRoute element={<BeamtimeOverview activePgroup={activePgroup} onPgroupChange={handlePgroupChange} />} />}/>
+        {/* Optionally, add a 404 fallback route */}
+        <Route path="*" element={<div>Page not found</div>} />
+
       </Routes>
       <Modal open={openAddressManager} onClose={handleCloseAddressManager} title="Address Management">
         <AddressManager pgroups={pgroups} activePgroup={activePgroup} />
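
Reviewer note on the routing change above: `BeamtimeOverview` navigates to the new `/results/:beamtimeId` route and appends the chosen pgroup as a query parameter (see `handleViewResults` in the new component below). `ResultsView` itself is not part of this diff, so the following is only a sketch, under the assumption that it reads both values with the standard react-router-dom v6 hooks; the component and variable names here are illustrative.

```tsx
// Sketch only: how a results page could pick up /results/:beamtimeId?pgroup=...
import React from 'react';
import { useParams, useSearchParams } from 'react-router-dom';

const ResultsRouteReader: React.FC = () => {
    // Route parameter declared in App.tsx as path="/results/:beamtimeId"
    const { beamtimeId } = useParams<{ beamtimeId: string }>();
    // Query string appended by BeamtimeOverview.handleViewResults (?pgroup=p20001)
    const [searchParams] = useSearchParams();
    const pgroup = searchParams.get('pgroup') ?? '';

    return <div>Results for beamtime {beamtimeId} (pgroup {pgroup})</div>;
};

export default ResultsRouteReader;
```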
frontend/src/components/BeamtimeOverview.tsx (new file, 146 lines)
@@ -0,0 +1,146 @@
+import React, { useEffect, useState } from 'react';
+import { DataGridPremium, GridColDef } from '@mui/x-data-grid-premium';
+import { useNavigate } from 'react-router-dom';
+import {Beamtime, BeamtimesService} from '../../openapi';
+import { Chip, Typography } from '@mui/material';
+
+interface BeamtimeRecord {
+    id: number;
+    start_date: string;
+    end_date: string;
+    shift: string;
+    beamline: string;
+    local_contact: string;
+    pgroups: string;
+}
+
+interface BeamtimeOverviewProps {
+    activePgroup: string;
+    onPgroupChange: (pgroup: string) => void; // Add callback to update the selected pgroup
+}
+
+const BeamtimeOverview: React.FC<BeamtimeOverviewProps> = ({ activePgroup, onPgroupChange }) => {
+    const [rows, setRows] = useState<BeamtimeRecord[]>([]);
+    const [isLoading, setIsLoading] = useState(false);
+
+    // For navigation
+    const navigate = useNavigate();
+
+    const renderPgroupChips = (pgroups: string, activePgroup: string) => {
+        // Safely handle pgroups as an array
+        const pgroupsArray = pgroups.split(",").map((pgroup: string) => pgroup.trim());
+
+        if (!pgroupsArray.length) {
+            return <Typography variant="body2">No associated pgroups</Typography>;
+        }
+
+        return pgroupsArray.map((pgroup: string) => (
+            <Chip
+                key={pgroup}
+                label={pgroup}
+                color={pgroup === activePgroup ? "primary" : "default"} // Highlight active pgroups
+                sx={{
+                    margin: 0.5,
+                    backgroundColor: pgroup === activePgroup ? '#19d238' : '#b0b0b0',
+                    color: pgroup === activePgroup ? 'white' : 'black',
+                    fontWeight: 'bold',
+                    borderRadius: '8px',
+                    height: '20px',
+                    fontSize: '12px',
+                    boxShadow: '0px 1px 3px rgba(0, 0, 0, 0.2)',
+                    mr: 1,
+                    mb: 1,
+                }}
+            />
+        ));
+    };
+
+    // Fetch beamtime records from the backend
+    const fetchBeamtimeRecords = async () => {
+        try {
+            setIsLoading(true);
+            const records = await BeamtimesService.getMyBeamtimesProtectedBeamtimesMyBeamtimesGet(activePgroup);
+
+            const mappedRecords: BeamtimeRecord[] = records.map((record: any) => ({
+                id: record.id,
+                start_date: record.start_date || 'N/A',
+                end_date: record.end_date || 'N/A',
+                shift: record.shift || 'N/A',
+                beamline: record.beamline || 'N/A',
+                local_contact: `${record.local_contact.firstname || "N/A"} ${record.local_contact.lastname || "N/A"}`,
+                pgroups: record.pgroups || '',
+            }));
+
+            setRows(mappedRecords);
+        } catch (error) {
+            console.error('Failed to fetch beamtime records:', error);
+        } finally {
+            setIsLoading(false);
+        }
+    };
+
+    useEffect(() => {
+        fetchBeamtimeRecords();
+    }, [activePgroup]);
+
+    // Define table columns, including the "View Results" button
+    const columns: GridColDef<BeamtimeRecord>[] = [
+        { field: 'start_date', headerName: 'Start Date', flex: 1 },
+        { field: 'end_date', headerName: 'End Date', flex: 1 },
+        { field: 'shift', headerName: "Shift", flex: 1 },
+        { field: 'beamline', headerName: 'Beamline', flex: 1 },
+        { field: 'local_contact', headerName: 'Local Contact', flex: 1 },
+        {
+            field: 'pgroups',
+            headerName: 'Pgroups',
+            flex: 2, // Slightly wider column for chips
+            renderCell: (params) => renderPgroupChips(params.row.pgroups, activePgroup),
+        },
+        {
+            field: 'viewResults',
+            headerName: 'Actions',
+            flex: 1,
+            renderCell: (params) => (
+                <button
+                    onClick={() => handleViewResults(params.row.id, params.row.pgroups)}
+                    style={{
+                        padding: '6px 12px',
+                        backgroundColor: '#1976d2',
+                        color: '#fff',
+                        border: 'none',
+                        borderRadius: '4px',
+                        cursor: 'pointer',
+                    }}
+                >
+                    View Results
+                </button>
+            ),
+        },
+    ];
+
+    // Navigate to the ResultsView page for the selected beamtime
+    const handleViewResults = (beamtimeId: number, pgroups: string) => {
+        const pgroupArray = pgroups.split(',').map((pgroup) => pgroup.trim());
+        const firstPgroup = pgroupArray[0] || ''; // Choose the first pgroup (or fallback to empty string)
+
+        // Ensure onPgroupChange is invoked correctly
+        onPgroupChange(firstPgroup);
+
+        // Navigate directly to the Results page with the correct pgroup in the query
+        navigate(`/results/${beamtimeId}?pgroup=${firstPgroup}`);
+    };
+
+    return (
+        <div style={{ height: 400, width: '100%' }}>
+            <h2>Beamtime Overview</h2>
+            <DataGridPremium
+                rows={rows}
+                columns={columns}
+                loading={isLoading}
+                disableRowSelectionOnClick
+            />
+        </div>
+    );
+};
+
+export default BeamtimeOverview;
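
Reviewer note on `fetchBeamtimeRecords` above: the mapping dereferences `record.local_contact.firstname` directly, so a beamtime without a local contact would throw and leave the grid empty. A defensive variant of just that helper, keeping the same 'N/A' fallbacks (sketch; field names as in the component):

```tsx
// Sketch: optional chaining keeps the row mapping alive when local_contact is null/undefined.
const localContactLabel = (record: any): string => {
    const firstname = record.local_contact?.firstname ?? 'N/A';
    const lastname = record.local_contact?.lastname ?? 'N/A';
    return `${firstname} ${lastname}`;
};

// Inside the map: local_contact: localContactLabel(record),
```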
@@ -4,42 +4,52 @@ import dayGridPlugin from '@fullcalendar/daygrid';
 import timeGridPlugin from '@fullcalendar/timegrid';
 import interactionPlugin from '@fullcalendar/interaction';
 import '../styles/Calendar.css';
+import { BeamtimesService, DewarsService, PucksService } from '../../openapi';
+import Chip from '@mui/material/Chip'

-// Define colors for each beamline
 const beamlineColors: { [key: string]: string } = {
-    PXI: '#FF5733',
-    PXII: '#33FF57',
-    PXIII: '#3357FF',
-    Unknown: '#CCCCCC', // Gray color for unknown beamlines
+    X06SA: '#FF5733',
+    X10SA: '#33FF57',
+    X06DA: '#3357FF',
+    Unknown: '#CCCCCC',
 };

-// Custom event interface
 interface CustomEvent extends EventInput {
     beamline: string;
     beamtime_shift: string;
-    isSubmitted?: boolean; // Track if information is submitted
+    beamtime_id?: number;
+    isSubmitted?: boolean;
+    activePgroup?: string;
+    pgroups?: string;
 }

-// Define experiment modes
+interface CalendarProps {
+    activePgroup: string;
+}
+
+
 const experimentModes = ['SDU-Scheduled', 'SDU-queued', 'Remote', 'In-person'];

-// Utility function to darken a hex color
 const darkenColor = (color: string, percent: number): string => {
-    const num = parseInt(color.slice(1), 16); // Convert hex to number
-    const amt = Math.round(2.55 * percent); // Calculate amount to darken
-    const r = (num >> 16) + amt; // Red
-    const g = (num >> 8 & 0x00FF) + amt; // Green
-    const b = (num & 0x0000FF) + amt; // Blue
-    // Ensure values stay within 0-255 range
-    const newColor = (0x1000000 + (r < 255 ? (r < 0 ? 0 : r) : 255) * 0x10000 + (g < 255 ? (g < 0 ? 0 : g) : 255) * 0x100 + (b < 255 ? (b < 0 ? 0 : b) : 255)).toString(16).slice(1);
+    const num = parseInt(color.slice(1), 16);
+    const amt = Math.round(2.55 * percent);
+    const r = (num >> 16) + amt;
+    const g = (num >> 8 & 0x00FF) + amt;
+    const b = (num & 0x0000FF) + amt;
+    const newColor = (0x1000000 + (r < 255 ? (r < 0 ? 0 : r) : 255) * 0x10000
+        + (g < 255 ? (g < 0 ? 0 : g) : 255) * 0x100
+        + (b < 255 ? (b < 0 ? 0 : b) : 255)).toString(16).slice(1);
     return `#${newColor}`;
 };

-const Calendar: React.FC = () => {
+const Calendar = ({ activePgroup }: CalendarProps) => {
     const [events, setEvents] = useState<CustomEvent[]>([]);
+    const [isLoading, setIsLoading] = useState(false);
+    const [fetchError, setFetchError] = useState<string | null>(null);
     const [selectedEventId, setSelectedEventId] = useState<string | null>(null);
     const [eventDetails, setEventDetails] = useState<CustomEvent | null>(null);
+    // eventId => { dewars: [dewar_id], pucks: [puck_id] }
+    const [eventAssociations, setEventAssociations] = useState<{ [eventId: string]: { dewars: string[], pucks: string[] } }>({});
     const [userDetails, setUserDetails] = useState({
         name: '',
         firstName: '',
@@ -48,85 +58,140 @@ const Calendar: React.FC = () => {
         extAccount: '',
         experimentMode: experimentModes[0],
     });
-    const [shipments, setShipments] = useState<any[]>([]); // State for shipments
-    const [selectedDewars, setSelectedDewars] = useState<string[]>([]); // Track selected dewars for the experiment
+    const [shipments, setShipments] = useState<any[]>([]);

+    // Load all beamtime events AND their current associations (on mount)
     useEffect(() => {
-        const fetchEvents = async () => {
+        const fetchAll = async () => {
+            setIsLoading(true);
+            setFetchError(null);
             try {
-                const response = await fetch('/beamtimedb.json');
-                const data = await response.json();
-                const events: CustomEvent[] = [];
+                const beamtimes = await BeamtimesService.getMyBeamtimesProtectedBeamtimesMyBeamtimesGet();
+                console.log('Loaded beamtimes:', beamtimes);
+                const grouped: { [key: string]: any[] } = {};
+                beamtimes.forEach((beamtime: any) => {
+                    const key = `${beamtime.start_date}|${beamtime.beamline}|${beamtime.pgroups}`;
+                    if (!grouped[key]) grouped[key] = [];
+                    grouped[key].push(beamtime);
+                });

-                data.beamtimes.forEach((beamtime: any) => {
-                    const date = new Date(beamtime.date);
-                    beamtime.shifts.forEach((shift: any) => {
-                        const beamline = shift.beamline || 'Unknown';
-                        const beamtime_shift = shift.beamtime_shift || 'morning';
+                const formattedEvents: CustomEvent[] = Object.values(grouped).map((group) => {
+                    const shifts = group.map((bt: any) => bt.shift).join(" + ");
+                    const ids = group.map((bt: any) => bt.id);
+                    const first = group[0];
+                    console.log(`[DEBUG] pgroups: ${first.pgroups}`); // Ensure the value of pgroups here is correct
+                    return {
+                        id: `${first.beamline}-${first.start_date}-${first.pgroups}`,
+                        title: `${first.beamline}: ${shifts}`,
+                        start: first.start_date,
+                        end: first.end_date,
+                        beamtime_ids: ids,
+                        beamline: first.beamline || 'Unknown',
+                        beamtime_shift: shifts,
+                        backgroundColor: beamlineColors[first.beamline] || beamlineColors.Unknown,
+                        borderColor: '#000',
+                        textColor: '#fff',
+                        beamtimes: group,
+                        extendedProps: {
+                            pgroups: first.pgroups, // Check that this is a valid, comma-separated string
+                        },
+                    };
+                });
+                setEvents(formattedEvents);

-                        const event: CustomEvent = {
-                            id: `${beamline}-${date.toISOString()}-${beamtime_shift}`,
-                            start: new Date(date.setHours(0, 0, 0)),
-                            end: new Date(date.setHours(23, 59, 59)),
-                            title: `${beamline}: ${beamtime_shift}`,
-                            beamline,
-                            beamtime_shift,
-                            isSubmitted: false,
-                        };
+                // Fetch associations for all
+                const assoc: { [id: string]: { dewars: string[]; pucks: string[] } } = {};
+                console.log('Fetched associations after loading events:', assoc);

-                        events.push(event);
-                    });
-                });
+                await Promise.all(
+                    Object.values(grouped).map(async (group) => {
+                        // multiple (or single) beamtimes per group
+                        const ids = group.map((bt: any) => bt.id);
+                        // fetch and merge for all ids in this group:
+                        let dewarsSet = new Set<string>();
+                        let pucksSet = new Set<string>();
+                        await Promise.all(
+                            ids.map(async (beamtimeId: number) => {
+                                const [dewars, pucks] = await Promise.all([
+                                    DewarsService.getDewarsByBeamtime(beamtimeId),
+                                    PucksService.getPucksByBeamtime(beamtimeId),
+                                ]);
+                                console.log(`Dewars for beamtime ${beamtimeId}:`, dewars);
+                                console.log(`Pucks for beamtime ${beamtimeId}:`, pucks);
+                                dewars.forEach((d: any) => dewarsSet.add(d.id));
+                                pucks.forEach((p: any) => pucksSet.add(p.id));
+                            })
+                        );
+                        // key must match event id
+                        const eventId = `${group[0].beamline}-${group[0].start_date}-${group[0].pgroups}`;
+                        assoc[eventId] = {
+                            dewars: Array.from(dewarsSet),
+                            pucks: Array.from(pucksSet),
+                        };
+                    })
+                );
+                console.log("Final eventAssociations:", assoc);
+                setEventAssociations(assoc);

-                console.log('Fetched events array:', events);
-                setEvents(events);
             } catch (error) {
-                console.error('Error fetching events:', error);
+                setFetchError('Failed to load beamtime data. Please try again later.');
+                setEvents([]);
+                setEventAssociations({});
+            } finally {
+                setIsLoading(false);
             }
         };
+        fetchAll();

-        const fetchShipments = async () => {
-            try {
-                const response = await fetch('/shipmentdb.json');
-
-                // Check for HTTP errors
-                if (!response.ok) {
-                    throw new Error(`HTTP error! status: ${response.status}`);
-                }
-
-                // Parse the JSON response
-                const data = await response.json();
-
-                const availableDewars: any[] = [];
-
-                data.shipments.forEach(shipment => {
-                    if (shipment.shipment_status === "In Transit") {
-                        shipment.dewars.forEach(dewar => {
-                            if (dewar.shippingStatus === "shipped" && dewar.returned === "") {
-                                availableDewars.push(dewar);
-                            }
-                        });
-                    }
-                });
-
-                console.log('Available Dewars:', availableDewars);
-                setShipments(availableDewars);
-            } catch (error) {
-                console.error('Error fetching shipments:', error);
-                // Optionally display the error to the user in the UI
-            }
-        };
-
-        fetchEvents();
-        fetchShipments();
     }, []);

+    // When an event is selected, fetch up-to-date dewar list
+    useEffect(() => {
+        if (eventDetails) {
+            const fetchDewars = async () => {
+                try {
+                    const dewarsWithPucks = await DewarsService.getRecentDewarsWithPucks();
+                    setShipments(dewarsWithPucks);
+                } catch (err) {
+                    setShipments([]);
+                }
+            };
+            fetchDewars();
+        } else {
+            setShipments([]);
+        }
+    }, [eventDetails]);
+
+    // Refresh associations after (un)assign action
+    const refetchEventAssociations = async (beamtimeIds: number[], eventId: string) => {
+        let dewarsSet = new Set<string>();
+        let pucksSet = new Set<string>();
+        await Promise.all(
+            beamtimeIds.map(async (beamtimeId: number) => {
+                const [dewars, pucks] = await Promise.all([
+                    DewarsService.getDewarsByBeamtime(beamtimeId),
+                    PucksService.getPucksByBeamtime(beamtimeId),
+                ]);
+                dewars.forEach((d: any) => dewarsSet.add(d.id));
+                pucks.forEach((p: any) => pucksSet.add(p.id));
+            })
+        );
+        setEventAssociations(prev => ({
+            ...prev,
+            [eventId]: {
+                dewars: Array.from(dewarsSet),
+                pucks: Array.from(pucksSet),
+            }
+        }));
+    };
+
     const handleEventClick = (eventInfo: any) => {
         const clickedEventId = eventInfo.event.id;
         setSelectedEventId(clickedEventId);
-        const selectedEvent = events.find(event => event.id === clickedEventId) || null;
-        setEventDetails(selectedEvent);
+        const selected = events.find(event => event.id === clickedEventId) || null;
+        setEventDetails(selected);
     };

     const handleInputChange = (e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>) => {
@@ -137,32 +202,15 @@ const Calendar: React.FC = () => {
         }));
     };

-    const handleDewarSelection = (dewarId: string) => {
-        setSelectedDewars(prevSelectedDewars => {
-            if (prevSelectedDewars.includes(dewarId)) {
-                return prevSelectedDewars.filter(id => id !== dewarId); // Remove if already selected
-            } else {
-                return [...prevSelectedDewars, dewarId]; // Add if not selected
-            }
-        });
-    };
-
     const handleSubmit = (e: React.FormEvent) => {
         e.preventDefault();

         if (eventDetails) {
-            const updatedEvents = events.map(event =>
-                event.id === eventDetails.id
-                    ? { ...event, isSubmitted: true, selectedDewars } // Associate selected dewars
-                    : event
+            setEvents(prev =>
+                prev.map(event =>
+                    event.id === eventDetails.id ? { ...event, isSubmitted: true } : event
+                )
             );
-            setEvents(updatedEvents);
         }

-        console.log('User Details:', userDetails);
-        console.log('Selected Dewars:', selectedDewars);
-
-        // Reset user details and selected dewars after submission
         setUserDetails({
             name: '',
             firstName: '',
@@ -171,20 +219,64 @@ const Calendar: React.FC = () => {
             extAccount: '',
             experimentMode: experimentModes[0],
         });
-        setSelectedDewars([]); // Reset selected dewars
     };

+    // Unified assign/unassign for Dewars
+    const handleDewarAssignment = async (dewarId: string) => {
+        if (!selectedEventId) return;
+        const event = events.find(e => e.id === selectedEventId)!;
+        const beamtimeIds: number[] = event.beamtime_ids || [];
+        if (!beamtimeIds.length) return;
+        const assigned = eventAssociations[selectedEventId]?.dewars.includes(dewarId);
+        try {
+            await Promise.all(
+                beamtimeIds.map(btId =>
+                    assigned
+                        ? DewarsService.assignDewarToBeamtime(Number(dewarId), 0)
+                        : DewarsService.assignDewarToBeamtime(Number(dewarId), Number(btId))
+                )
+            );
+            await refetchEventAssociations(beamtimeIds, selectedEventId);
+        } catch (e) {
+        /* error handling */}
+    };
+
+    // Unified assign/unassign for Pucks
+    const handlePuckAssignment = async (puckId: string) => {
+        if (!selectedEventId) return;
+        const event = events.find(e => e.id === selectedEventId)!;
+        const beamtimeIds: number[] = event.beamtime_ids || [];
+        if (!beamtimeIds.length) return;
+        const assigned = eventAssociations[selectedEventId]?.pucks.includes(puckId);
+        try {
+            await Promise.all(
+                beamtimeIds.map(async btId =>
+                    assigned
+                        ? PucksService.assignPuckToBeamtime(Number(puckId), 0)
+                        : PucksService.assignPuckToBeamtime(Number(puckId), Number(btId))
+                )
+            );
+            await refetchEventAssociations(beamtimeIds, selectedEventId);
+        } catch (e) {/* error handling */}
+    };
+
+    // For displaying badge in calendar and UI
     const eventContent = (eventInfo: any) => {
+        const beamtimesInGroup = eventInfo.event.extendedProps.beamtimes
+            ? eventInfo.event.extendedProps.beamtimes.length
+            : 1;
+        const minHeight = beamtimesInGroup * 26;
         const beamline = eventInfo.event.extendedProps.beamline || 'Unknown';
         const isSelected = selectedEventId === eventInfo.event.id;
         const isSubmitted = eventInfo.event.extendedProps.isSubmitted;
+        const assoc = eventAssociations[eventInfo.event.id] || { dewars: [], pucks: [] };
         const backgroundColor = isSubmitted
             ? darkenColor(beamlineColors[beamline] || beamlineColors.Unknown, -20)
             : isSelected
                 ? '#FFD700'
                 : (beamlineColors[beamline] || beamlineColors.Unknown);

         return (
             <div
                 style={{
@@ -193,19 +285,85 @@ const Calendar: React.FC = () => {
                     border: isSelected ? '2px solid black' : 'none',
                     borderRadius: '3px',
                     display: 'flex',
-                    justifyContent: 'center',
+                    justifyContent: 'space-between',
                     alignItems: 'center',
                     height: '100%',
                     width: '100%',
                     cursor: 'pointer',
                     overflow: 'hidden',
                     boxSizing: 'border-box',
+                    padding: '0 6px',
+                    minHeight: `${minHeight}px`,
                 }}
             >
+                <span style={{ whiteSpace: 'nowrap', overflow: 'hidden', textOverflow: 'ellipsis' }}>
                 {eventInfo.event.title}
+                </span>
+                <span style={{ display: 'flex', alignItems: 'center', gap: 6, marginLeft: 8 }}>
+                    <span title="Dewars" style={{ display: 'flex', alignItems: 'center', fontSize: 13 }}>
+                        🧊
+                        <span style={{
+                            background: 'rgba(0,0,0,0.45)',
+                            borderRadius: '8px',
+                            marginLeft: 2,
+                            minWidth: 14,
+                            color: '#fff',
+                            fontSize: 12,
+                            padding: '0 4px',
+                            fontWeight: 600,
+                            textAlign: 'center'
+                        }}>{assoc.dewars.length}</span>
+                    </span>
+                    <span title="Pucks" style={{ display: 'flex', alignItems: 'center', fontSize: 13 }}>
+                        ⚪
+                        <span style={{
+                            background: 'rgba(0,0,0,0.45)',
+                            borderRadius: '8px',
+                            marginLeft: 2,
+                            minWidth: 14,
+                            color: '#fff',
+                            fontSize: 12,
+                            padding: '0 4px',
+                            fontWeight: 600,
+                            textAlign: 'center'
+                        }}>{assoc.pucks.length}</span>
+                    </span>
+                    {eventInfo.event.extendedProps?.pgroups && eventInfo.event.extendedProps.pgroups.split(',')
+                        .map((pgroup: string) => (
+                            <Chip
+                                key={pgroup.trim()}
+                                label={pgroup.trim()}
+                                size="small"
+                                sx={{
+                                    marginLeft: 0.5,
+                                    marginRight: 0.5,
+                                    backgroundColor: pgroup.trim() === activePgroup ? '#19d238' : '#b0b0b0',
+                                    color: pgroup.trim() === activePgroup ? 'white' : 'black',
+                                    fontWeight: 'bold',
+                                    borderRadius: '8px',
+                                    height: '20px',
+                                    fontSize: '12px',
+                                    boxShadow: '0px 1px 3px rgba(0, 0, 0, 0.2)',
+                                    mr: 1,
+                                    mb: 1,
+                                }}
+                            />
+                        ))
+                    }
+                </span>
             </div>
         );
     };
+    function getAssignedEventForDewar(dewarId: string) {
+        return Object.entries(eventAssociations).find(([eid, assoc]) =>
+            assoc.dewars.includes(dewarId)
+        );
+    }
+    function getAssignedEventForPuck(puckId: string) {
+        return Object.entries(eventAssociations).find(([eid, assoc]) =>
+            assoc.pucks.includes(puckId)
+        );
+    }

     return (
         <div className="calendar-container">
@@ -232,17 +390,85 @@ const Calendar: React.FC = () => {

             <h4>Select Dewars</h4>
             <ul>
-                {shipments.map(dewar => (
-                    <li key={dewar.id}>
-                        <input
-                            type="checkbox"
-                            id={dewar.id}
-                            checked={selectedDewars.includes(dewar.id)}
-                            onChange={() => handleDewarSelection(dewar.id)}
-                        />
-                        <label htmlFor={dewar.id}>{dewar.dewar_name} (Pucks: {dewar.number_of_pucks})</label>
-                    </li>
-                ))}
+                {shipments.map(dewar => {
+                    const thisEvent = eventAssociations[selectedEventId!] || { dewars: [], pucks: [] };
+                    const dewarAssigned = thisEvent.dewars.includes(dewar.id);
+
+                    const [assocEventId, assoc] = getAssignedEventForDewar(dewar.id) || [];
+                    const assocEvent = assocEventId
+                        ? events.find(ev => ev.id === assocEventId)
+                        : null;
+                    const assocShift = assocEvent?.beamtime_shift;
+                    const assocDate = assocEvent?.start;
+                    const assocBeamline = assocEvent?.beamline;
+
+                    const currentShift = eventDetails?.beamtime_shift;
+                    const isAssignedToThis = assocShift && currentShift && assocShift === currentShift;
+
+                    return (
+                        <li key={dewar.id}>
+                            <label>
+                                <input
+                                    type="checkbox"
+                                    checked={dewarAssigned}
+                                    onChange={() => handleDewarAssignment(dewar.id)}
+                                />
+                                <b>{dewar.dewar_name}</b>
+                            </label>
+                            {/* List all pucks in this Dewar, each with assign button */}
+                            {Array.isArray(dewar.pucks) && dewar.pucks.length > 0 && (
+                                <ul>
+                                    {dewar.pucks.map(puck => {
+                                        const [pAssocEventId] = getAssignedEventForPuck(puck.id) || [];
+                                        const pAssocEvent = pAssocEventId
+                                            ? events.find(ev => ev.id === pAssocEventId)
+                                            : null;
+                                        const pAssocShift = pAssocEvent?.beamtime_shift;
+                                        const pAssocDate = pAssocEvent?.start;
+                                        const pAssocBeamline = pAssocEvent?.beamline;
+                                        const isAssignedHere = pAssocShift && currentShift && pAssocShift === currentShift;
+                                        return (
+                                            <li key={puck.id} style={{marginLeft:8}}>
+                                                <button
+                                                    type="button"
+                                                    style={{
+                                                        background: isAssignedHere ? '#4CAF50' : (pAssocShift ? '#B3E5B3' : '#e0e0e0'),
+                                                        color: isAssignedHere ? 'white' : 'black',
+                                                        border: isAssignedHere ? '1px solid #388e3c' : '1px solid #bdbdbd',
+                                                        borderRadius: 4,
+                                                        padding: '2px 10px',
+                                                        cursor: 'pointer',
+                                                        transition: 'background 0.2s',
+                                                    }}
+                                                    onClick={() => handlePuckAssignment(puck.id)}
+                                                >
+                                                    {puck.puck_name || puck.name}
+                                                </button>
+                                                {pAssocEvent && (
+                                                    <span style={{
+                                                        marginLeft: 8,
+                                                        color: isAssignedHere ? 'green' : '#388e3c',
+                                                        fontWeight: isAssignedHere ? 700 : 400
+                                                    }}>
+                                                        ← Assigned to: {pAssocShift} {pAssocDate && <>on {new Date(pAssocDate).toLocaleDateString()}</>} {pAssocBeamline && <>({pAssocBeamline})</>}
+                                                    </span>
+                                                )}
+                                            </li>
+                                        );
+                                    })}
+                                </ul>
+                            )}
+                            {/* Show dewar assignment info if not to this shift */}
+                            {assocEvent && (
+                                <span style={{marginLeft:8, color:isAssignedToThis?'green':'#388e3c', fontWeight:isAssignedToThis?700:400}}>
+                                    ← Assigned to: {assocShift}
+                                    {assocDate && <> on {new Date(assocDate).toLocaleDateString()}</>}
+                                    {assocBeamline && <> ({assocBeamline})</>}
+                                </span>
+                            )}
+                        </li>
+                    );
+                })}
             </ul>

             <h4>User Information</h4>
@@ -318,4 +544,4 @@ const Calendar: React.FC = () => {
     );
 };

 export default Calendar;
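
Reviewer note on the two assignment handlers above: `handleDewarAssignment` and `handlePuckAssignment` are identical except for the service they call, and both encode "unassign" as assigning to beamtime id 0. They could share one helper along these lines (sketch only; it reuses the generated services already imported in the component and keeps the id-0 convention, which is worth documenting on the backend):

```typescript
// Sketch: one toggle helper for dewars and pucks alike.
type AssignToBeamtime = (itemId: number, beamtimeId: number) => Promise<unknown>;

const toggleBeamtimeAssignment = async (
    assign: AssignToBeamtime,   // e.g. DewarsService.assignDewarToBeamtime or PucksService.assignPuckToBeamtime
    itemId: string,
    beamtimeIds: number[],
    currentlyAssigned: boolean,
): Promise<void> => {
    await Promise.all(
        beamtimeIds.map(btId =>
            // beamtime id 0 means "unassign", mirroring the handlers in Calendar.tsx
            assign(Number(itemId), currentlyAssigned ? 0 : Number(btId))
        )
    );
};
```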
@@ -3,16 +3,19 @@ import { Navigate } from 'react-router-dom';

 interface ProtectedRouteProps {
     element: JSX.Element;
+    [key: string]: any; // Allow additional props
 }

-const ProtectedRoute: React.FC<ProtectedRouteProps> = ({ element }) => {
+const ProtectedRoute: React.FC<ProtectedRouteProps> = ({ element, ...rest }) => {
     const isAuthenticated = () => {
         const token = localStorage.getItem('token');
         console.log("Is Authenticated: ", token !== null);
         return token !== null;
     };

-    return isAuthenticated() ? element : <Navigate to="/login" />;
+    return isAuthenticated()
+        ? React.cloneElement(element, { ...rest }) // Pass all additional props
+        : <Navigate to="/login" />;
 };

 export default ProtectedRoute;
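
Reviewer note on the `ProtectedRoute` change above: with the `[key: string]: any` index signature and `React.cloneElement(element, { ...rest })`, any extra props placed on `ProtectedRoute` are now forwarded to the wrapped element. A usage sketch, meant to be dropped inside the existing `<Routes>` block in App.tsx (the path and props here are illustrative, not part of this diff):

```tsx
{/* Sketch: props on ProtectedRoute are cloned onto the protected element */}
<Route
    path="/example"
    element={
        <ProtectedRoute
            element={<ResultsView />}              // receives the props below via cloneElement
            currentPgroup={activePgroup}           // forwarded as an extra prop
            onPgroupChange={handlePgroupChange}    // forwarded as an extra prop
        />
    }
/>
```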
@@ -1,4 +1,4 @@
-import React, { useState } from 'react';
+import React, { useState, useEffect } from 'react';
 import { useNavigate, useLocation } from 'react-router-dom';
 import AppBar from '@mui/material/AppBar';
 import Box from '@mui/material/Box';
@@ -38,6 +38,12 @@ const ResponsiveAppBar: React.FC<ResponsiveAppBarProps> = ({
     const [anchorElNav, setAnchorElNav] = useState<null | HTMLElement>(null);
     const [anchorElUser, setAnchorElUser] = useState<null | HTMLElement>(null);
     const [selectedPgroup, setSelectedPgroup] = useState(currentPgroup);

+    useEffect(() => {
+        setSelectedPgroup(currentPgroup); // Sync local state with the global activePgroup
+    }, [currentPgroup]);
+
     console.log('Active Pgroup:', activePgroup);
     const handlePgroupChange = (event: React.ChangeEvent<{ value: unknown }>) => {
         const newPgroup = event.target.value as string;
@ -1,9 +1,16 @@
|
|||||||
import React, { useEffect, useState } from 'react';
|
import React, { useEffect, useState, useRef } from 'react';
|
||||||
import { DataGridPremium, GridColDef } from '@mui/x-data-grid-premium';
|
import { DataGridPremium, GridColDef } from '@mui/x-data-grid-premium';
|
||||||
import RunDetails from './RunDetails';
|
import RunDetails from './RunDetails';
|
||||||
import './SampleImage.css';
|
import './SampleImage.css';
|
||||||
import './ResultGrid.css';
|
import './ResultGrid.css';
|
||||||
import { OpenAPI, SamplesService } from '../../openapi';
|
import { OpenAPI, SamplesService } from '../../openapi';
|
||||||
|
import ScheduleIcon from '@mui/icons-material/Schedule';
|
||||||
|
import DoDisturbIcon from '@mui/icons-material/DoDisturb';
|
||||||
|
import TaskAltIcon from '@mui/icons-material/TaskAlt';
|
||||||
|
import ErrorOutlineIcon from '@mui/icons-material/ErrorOutline';
|
||||||
|
import InfoOutlinedIcon from '@mui/icons-material/InfoOutlined';
|
||||||
|
import HourglassEmptyIcon from '@mui/icons-material/HourglassEmpty';
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// Extend your image info interface if needed.
|
// Extend your image info interface if needed.
|
||||||
@@ -95,17 +102,103 @@ interface TreeRow {
   beamline_parameters?: ExperimentParameters['beamline_parameters'];
   experimentType?: string;
   numberOfImages?: number;
+  hasResults: boolean;
+  jobStatus?: string;
 }

 interface ResultGridProps {
   activePgroup: string;
 }

+const useJobStream = (onJobs: (jobs: any[]) => void) => {
+  const eventSourceRef = useRef<EventSource | null>(null);
+
+  useEffect(() => {
+    eventSourceRef.current = new EventSource(`${OpenAPI.BASE}/processing/jobs/stream`);
+    eventSourceRef.current.onmessage = async (event) => {
+      const jobs = JSON.parse(event.data); // Updated job data
+
+      onJobs(jobs);
+    };
+
+    return () => {
+      if (eventSourceRef.current) {
+        eventSourceRef.current.close();
+      }
+    };
+  }, [onJobs]);
+};
+
+
 const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
   const [rows, setRows] = useState<TreeRow[]>([]);
   const [basePath, setBasePath] = useState('');
   const [detailPanelHeights, setDetailPanelHeights] = useState<{ [key: string]: number }>({}); // Store dynamic heights
+  const [jobStatusMap, setJobStatusMap] = useState<{ [runId: number]: string }>({});
+
+  const getStatusIcon = (status: string, hasResults: boolean = false) => {
+    switch (status) {
+      case 'todo':
+        return <ScheduleIcon color="action" titleAccess="Todo" />;
+      case 'submitted':
+        return <HourglassEmptyIcon color="primary" className="spin" titleAccess="Submitted" />;
+      case 'completed':
+        return hasResults ? (
+          <TaskAltIcon color="success" titleAccess="Completed" />
+        ) : (
+          <InfoOutlinedIcon color="warning" titleAccess="Completed - No Results" />
+        );
+      case 'failed':
+        return <ErrorOutlineIcon color="error" titleAccess="Failed" />;
+      case 'cancelled':
+        return <DoDisturbIcon color="disabled" titleAccess="Cancelled" />;
+      case 'no job':
+      default:
+        return <InfoOutlinedIcon color="disabled" titleAccess="No job" />;
+    }
+  };
+
+  useJobStream((jobs) => {
+    const map: { [runId: number]: string } = {};
+    for (const job of jobs) {
+      // Map job status by run_id (or job_id as preferred)
+      map[job.run_id] = job.status;
+    }
+    setJobStatusMap(map);
+  });
+
+  const handleJobs = async (jobs: any[]) => {
+    console.log('Jobs received from the job stream:', jobs);
+
+    // Fetch results for each run based on the job stream
+    const updatedRows = await Promise.all(
+      rows.map(async (row) => {
+        if (row.type === 'run' && row.experimentId) {
+          try {
+            const results = await SamplesService.getResultsForRunAndSample(
+              row.sample_id,
+              row.experimentId
+            );
+            const hasResults = results.length > 0;
+            console.log(`Fetching results for experimentId: ${row.experimentId}, hasResults: ${hasResults}`);
+            return { ...row, hasResults }; // Update `hasResults` for the run
+          } catch (error) {
+            console.error(`Error fetching results for experimentId: ${row.experimentId}`, error);
+            return row; // Return unchanged row on error
+          }
+        }
+        return row; // Return unchanged for non-run rows
+      })
+    );
+
+    // Update the rows state with new `hasResults` values
+    setRows(updatedRows);
+  };
+
+  useJobStream(handleJobs);
+
+
   const hasProcessingResults = (row: TreeRow): boolean => {
     // You can later replace this placeholder with actual logic.
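The useJobStream hook introduced here opens a server-sent-events connection to `${OpenAPI.BASE}/processing/jobs/stream` and forwards every message to a callback; note that the grid registers it twice (once to build the status map, once for handleJobs), which opens two parallel EventSource connections to the same endpoint. For reference, a standalone sketch of the same consumption pattern, assuming the payload is a JSON array of objects carrying run_id and status:

// Minimal SSE consumer sketch (assumed payload shape: { run_id: number; status: string }[]).
// Illustrative only; the real component keeps the same data in React state via setJobStatusMap.
type JobUpdate = { run_id: number; status: string };

function subscribeToJobs(baseUrl: string, onUpdate: (byRun: Map<number, string>) => void): () => void {
  const source = new EventSource(`${baseUrl}/processing/jobs/stream`);
  source.onmessage = (event) => {
    const jobs: JobUpdate[] = JSON.parse(event.data);
    const byRun = new Map<number, string>();
    for (const job of jobs) {
      byRun.set(job.run_id, job.status); // keep the latest status per run
    }
    onUpdate(byRun);
  };
  source.onerror = () => {
    // EventSource retries automatically; close only if you want to stop listening.
  };
  return () => source.close(); // call the returned function to unsubscribe
}

// Usage:
// const stop = subscribeToJobs(OpenAPI.BASE, (statuses) => console.log(statuses));
// ...later: stop();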
@@ -216,8 +309,8 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
         beamline_parameters: run.beamline_parameters,
         experimentType,
         numberOfImages: numImages,
-        images: sample.images.filter(img =>
-          img.event_type === "Collecting" ),
+        images: sample.images.filter(img => img.event_type === "Collecting"),
+        hasResults: false, // Default to false until verified
       };
       treeRows.push(runRow);
     });
@@ -238,6 +331,32 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
       headerName: 'Sample Name',
       width: 200,
     },
+    {
+      field: 'jobStatus',
+      headerName: 'Job Status',
+      width: 120,
+      renderCell: (params) => {
+        if (params.row.type === 'run') {
+          const hasResults = params.row.hasResults || false; // Check for results
+          const jobStatus = jobStatusMap[params.row.experimentId] || 'no job'; // Fetch job status
+
+          // If there are results, only show the TaskAltIcon (no need for job status tracking)
+          if (hasResults) {
+            return (
+              <div style={{ display: 'flex', alignItems: 'center', gap: '5px' }}>
+                <TaskAltIcon color="success" titleAccess="Results available" />
+                <span style={{ fontSize: '0.75rem', color: '#4caf50' }}>Results</span>
+              </div>
+            );
+          }
+
+          // Otherwise, show the job tracking status icon
+          return getStatusIcon(jobStatus, hasResults);
+        }
+        return null; // No rendering for non-run rows
+      },
+    },
     {
       field: 'puck_name',
       headerName: 'Puck Name',
@@ -304,6 +423,20 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
     });
   };

+  const handleResultsFetched = (runId: number, hasResults: boolean) => {
+    console.log(`handleResultsFetched called for RunId ${runId}, hasResults: ${hasResults}`);
+    setRows((prevRows) =>
+      prevRows.map((row) => {
+        if (row.type === 'run' && row.experimentId === runId) {
+          console.log(`Updating row for runId ${runId}, setting hasResults=${hasResults}`);
+          return { ...row, hasResults };
+        }
+        return row;
+      })
+    );
+  };
+
+
   const getDetailPanelContent = (params: any) => {
     if (params.row.type === 'run') {
       return (
@@ -312,13 +445,16 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
           runId={params.row.experimentId}
           sample_id={params.row.sample_id}
           basePath={basePath}
-          onHeightChange={height => handleDetailPanelHeightChange(params.row.id, height)}
+          onHeightChange={(height) => handleDetailPanelHeightChange(params.row.id, height)}
+          onResultsFetched={(runId, hasResults) => handleResultsFetched(runId, hasResults)}
         />
       );
     }
     return null;
   };



   const getDetailPanelHeight = (params: any) => {
     if (params.row.type === 'run') {
       // Use the dynamically calculated height from state
@@ -331,6 +467,7 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {

   return (
     <DataGridPremium
+      key={JSON.stringify(rows)}
       rows={rows}
       columns={columns}
       getRowId={(row) => row.id}
@@ -361,4 +498,3 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
 };

 export default ResultGrid;
-
@@ -16,6 +16,7 @@ interface RunDetailsProps {
   sample_id: number;
   basePath: string;
   onHeightChange?: (height: number) => void;
+  onResultsFetched: (runId: number, hasResults: boolean) => void; // New callback
 }

 interface CCPoint {
@@ -38,13 +39,13 @@ interface ProcessingResults {
   resolution: number;
   unit_cell: string;
   spacegroup: string;
-  rmerge: number;
-  rmeas: number;
-  isig: number;
+  rmerge: CCPoint[];
+  rmeas: CCPoint[];
+  isig: CCPoint[];
   cc: CCPoint[];
   cchalf: CCPoint[];
-  completeness: number;
-  multiplicity: number;
+  completeness: CCPoint[];
+  multiplicity: CCPoint[];
   nobs: number;
   total_refl: number;
   unique_refl: number;
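These metrics switch from single numbers to arrays of per-resolution-shell points. The CCPoint definition itself is outside this excerpt; judging from how it is read elsewhere in RunDetails (point.value, point.resolution), its shape is presumably along these lines, which is an inference rather than the actual declaration:

// Inferred sketch of CCPoint; the real interface lives earlier in RunDetails.tsx and may differ.
interface CCPoint {
  value: number;      // metric value within one resolution shell (e.g. Rmerge, CC(1/2), completeness)
  resolution: number; // shell resolution in Å, as plotted on the chart's x-axis
}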
@@ -52,7 +53,9 @@ interface ProcessingResults {
 }


-const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath, runId, sample_id }) => {
+const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath, runId, sample_id, onResultsFetched }) => {
+  console.log('onResultsFetched is available:', onResultsFetched);
+
   const containerRef = useRef<HTMLDivElement | null>(null);
   const [currentHeight, setCurrentHeight] = useState<number>(0);
   const [modalOpen, setModalOpen] = useState<boolean>(false);
@@ -68,23 +71,23 @@ const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath,
   }, [runId]);

   const fetchResults = async (sample_id: number, runId: number) => {
+    console.log(`Fetching results for sample_id: ${sample_id}, runId: ${runId}`);
     try {
       const results = await SamplesService.getResultsForRunAndSample(sample_id, runId);

-      // Explicitly handle nested results
       const mappedResults: ProcessingResults[] = results.map((res): ProcessingResults => ({
         id: res.id,
         pipeline: res.result?.pipeline || 'N/A',
-        resolution: res.result.resolution ?? 0,
+        resolution: res.result?.resolution ?? 0,
         unit_cell: res.result?.unit_cell || 'N/A',
         spacegroup: res.result?.spacegroup || 'N/A',
-        rmerge: res.result?.rmerge ?? 0,
-        rmeas: res.result?.rmeas ?? 0,
-        isig: res.result?.isig ?? 0,
+        rmerge: res.result?.rmerge || [],
+        rmeas: res.result?.rmeas || [],
+        isig: res.result?.isig || [],
         cc: res.result?.cc || [],
         cchalf: res.result?.cchalf || [],
-        completeness: res.result?.completeness ?? 0,
-        multiplicity: res.result?.multiplicity ?? 0,
+        completeness: res.result?.completeness || [],
+        multiplicity: res.result?.multiplicity || [],
         nobs: res.result?.nobs ?? 0,
         total_refl: res.result?.total_refl ?? 0,
         unique_refl: res.result?.unique_refl ?? 0,
@@ -92,8 +95,13 @@ const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath,
       }));

       setProcessingResult(mappedResults);
+
+      console.log(`Mapped results for runId ${runId}:`, mappedResults);
+      console.log(`Boolean value for hasResults: ${mappedResults.length > 0}`);
+      onResultsFetched(runId, mappedResults.length > 0);
+
     } catch (error) {
-      console.error('Error fetching results:', error);
+      console.error(`Error fetching results for RunId ${runId}:`, error);
     }
   };

@@ -103,15 +111,45 @@ const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath,
     { field: 'resolution', headerName: 'Resolution (Å)', flex: 1 },
     { field: 'unit_cell', headerName: 'Unit Cell (Å)', flex: 1.5 },
     { field: 'spacegroup', headerName: 'Spacegroup', flex: 1 },
-    { field: 'rmerge', headerName: 'Rmerge', flex: 1 },
-    { field: 'rmeas', headerName: 'Rmeas', flex: 1 },
-    { field: 'isig', headerName: 'I/sig(I)', flex: 1 },
+    {
+      field: 'rmerge',
+      headerName: 'Rmerge',
+      flex: 1,
+      valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
+        params.row?.rmerge
+          ? Array.isArray(params.row.rmerge)
+            ? params.row.rmerge.map((value: CCPoint) => `${value.value.toFixed(2)}@${value.resolution.toFixed(2)}`).join(', ')
+            : params.row.rmerge.toFixed(2)
+          : 'N/A',
+    },
+    {
+      field: 'rmeas',
+      headerName: 'Rmeas',
+      flex: 1,
+      valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
+        params.row?.rmeas
+          ? Array.isArray(params.row.rmeas)
+            ? params.row.rmeas.map((value: CCPoint) => `${value.value.toFixed(2)}@${value.resolution.toFixed(2)}`).join(', ')
+            : params.row.rmeas.toFixed(2)
+          : 'N/A',
+    },
+    {
+      field: 'isig',
+      headerName: 'I/sig(I)',
+      flex: 1,
+      valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
+        params.row?.isig
+          ? Array.isArray(params.row.isig)
+            ? params.row.isig.map((value: CCPoint) => `${value.value.toFixed(2)}@${value.resolution.toFixed(2)}`).join(', ')
+            : params.row.isig.toFixed(2)
+          : 'N/A',
+    },
     {
       field: 'cc',
       headerName: 'CC',
       flex: 1,
       valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
-        Array.isArray(params.row?.cc)
+        params.row?.cc && Array.isArray(params.row.cc)
           ? params.row.cc.map((point: CCPoint) => `${point.value.toFixed(2)}@${point.resolution.toFixed(2)}`).join(', ')
           : '',
     },
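The new Rmerge, Rmeas and I/sig(I) columns repeat the same array-or-scalar formatting. If that duplication becomes a burden, the valueGetter body could be factored into one helper; a sketch under the CCPoint assumption above, with a helper name that is ours rather than the project's:

// Hypothetical refactor: one formatter shared by the per-shell metric columns.
const formatMetric = (metric: CCPoint[] | number | undefined | null): string => {
  if (metric == null) return 'N/A';
  if (Array.isArray(metric)) {
    // Render each shell as "value@resolution", matching the existing valueGetters.
    return metric
      .map((p) => `${p.value.toFixed(2)}@${p.resolution.toFixed(2)}`)
      .join(', ');
  }
  return metric.toFixed(2); // legacy scalar values
};

// e.g. { field: 'rmerge', headerName: 'Rmerge', flex: 1,
//        valueGetter: (params) => formatMetric(params.row?.rmerge) }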
@@ -120,18 +158,39 @@ const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath,
       headerName: 'CC(1/2)',
       flex: 1,
       valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
-        Array.isArray(params.row?.cchalf)
+        params.row?.cchalf && Array.isArray(params.row.cchalf)
           ? params.row.cchalf.map((point: CCPoint) => `${point.value.toFixed(2)}@${point.resolution.toFixed(2)}`).join(', ')
           : '',
     },
-    { field: 'completeness', headerName: 'Completeness (%)', flex: 1 },
-    { field: 'multiplicity', headerName: 'Multiplicity', flex: 1 },
+    {
+      field: 'completeness',
+      headerName: 'Completeness (%)',
+      flex: 1,
+      valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
+        params.row?.completeness
+          ? Array.isArray(params.row.completeness)
+            ? params.row.completeness.map((value: CCPoint) => `${value.value.toFixed(2)}@${value.resolution.toFixed(2)}`).join(', ')
+            : params.row.completeness.toFixed(2)
+          : 'N/A',
+    },
+    {
+      field: 'multiplicity',
+      headerName: 'Multiplicity',
+      flex: 1,
+      valueGetter: (params: GridValueGetterParams<ProcessingResults, string>) =>
+        params.row?.multiplicity
+          ? Array.isArray(params.row.multiplicity)
+            ? params.row.multiplicity.map((value: CCPoint) => `${value.value.toFixed(2)}@${value.resolution.toFixed(2)}`).join(', ')
+            : params.row.multiplicity.toFixed(2)
+          : 'N/A',
+    },
     { field: 'nobs', headerName: 'N obs.', flex: 1 },
     { field: 'total_refl', headerName: 'Total Reflections', flex: 1 },
     { field: 'unique_refl', headerName: 'Unique Reflections', flex: 1 },
     { field: 'comments', headerName: 'Comments', flex: 2 },
   ];


   const updateHeight = () => {
     if (containerRef.current) {
       const newHeight = containerRef.current.offsetHeight;
@@ -303,31 +362,71 @@ const RunDetails: React.FC<RunDetailsProps> = ({ run, onHeightChange, basePath,
       </div>

       {processingResult && processingResult.length > 0 && (
-        <div style={{width: 400, marginTop: '16px' }}>
-          <Typography variant="h6" gutterBottom>CC and CC(1/2) vs Resolution</Typography>
+        <div style={{width: 400, marginTop: '16px'}}>
+          <Typography variant="h6" gutterBottom>Processing Metrics vs Resolution</Typography>
           <LineChart
             xAxis={[
               {
                 data: processingResult[0].cc
-                  .map((point) => point.resolution) // Grab the resolution values
-                  .reverse(), // Reverse the data for resolution
+                  .map((point) => point.resolution) // Use resolution values for the x-axis
+                  .reverse(), // Reverse the resolution values to go from high-res to low-res
                 label: 'Resolution (Å)',
-                reverse: true, // This ensures the visual flip on the chart, low-res to right and high-res to left
+                reverse: true, // Flip visually so low-res is to the right
               },
             ]}
             series={[
               {
                 data: processingResult[0].cc
-                  .map((point) => point.value)
-                  .reverse(), // Reverse the CC values to match the reversed resolution
+                  .map((point) => point.value) // Map CC values
+                  .reverse(), // Reverse order for visual consistency
                 label: 'CC',
               },
               {
                 data: processingResult[0].cchalf
-                  .map((point) => point.value)
-                  .reverse(), // Reverse the CC(1/2) values to match the reversed resolution
+                  .map((point) => point.value) // Map CC(1/2) values
+                  .reverse(),
                 label: 'CC(1/2)',
               },
+              {
+                data: Array.isArray(processingResult[0].rmerge)
+                  ? processingResult[0].rmerge
+                      .map((point: CCPoint) => point.value) // Map Rmerge values
+                      .reverse()
+                  : [], // Handle edge case where Rmerge isn't an array
+                label: 'Rmerge',
+              },
+              {
+                data: Array.isArray(processingResult[0].rmeas)
+                  ? processingResult[0].rmeas
+                      .map((point: CCPoint) => point.value) // Map Rmeas values
+                      .reverse()
+                  : [],
+                label: 'Rmeas',
+              },
+              {
+                data: Array.isArray(processingResult[0].isig)
+                  ? processingResult[0].isig
+                      .map((point: CCPoint) => point.value) // Map I/sig(I) values
+                      .reverse()
+                  : [],
+                label: 'I/sig(I)',
+              },
+              {
+                data: Array.isArray(processingResult[0].completeness)
+                  ? processingResult[0].completeness
+                      .map((point: CCPoint) => point.value) // Map Completeness values
+                      .reverse()
+                  : [],
+                label: 'Completeness (%)',
+              },
+              {
+                data: Array.isArray(processingResult[0].multiplicity)
+                  ? processingResult[0].multiplicity
+                      .map((point: CCPoint) => point.value) // Map Multiplicity values
+                      .reverse()
+                  : [],
+                label: 'Multiplicity',
+              },
             ]}
             height={300}
           />
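Each added series block repeats the same guard, map and reverse steps. A small builder could keep the chart declaration compact; a hedged sketch that assumes the series entries only need data and label, as in the code above, and whose helper name is ours:

// Hypothetical helper: turn a per-shell metric into a LineChart series entry.
// Assumes CCPoint = { value: number; resolution: number }.
const toSeries = (label: string, points: CCPoint[] | number | undefined) => ({
  label,
  data: Array.isArray(points) ? points.map((p) => p.value).reverse() : [],
});

// Usage inside the component, mirroring the series list above:
// series={[
//   toSeries('CC', processingResult[0].cc),
//   toSeries('CC(1/2)', processingResult[0].cchalf),
//   toSeries('Rmerge', processingResult[0].rmerge),
//   toSeries('Rmeas', processingResult[0].rmeas),
//   toSeries('I/sig(I)', processingResult[0].isig),
// ]}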
@@ -1,10 +1,16 @@
-// Planning.tsx
 import React from 'react';
 import CustomCalendar from '../components/Calendar.tsx';

-const PlanningView: React.FC = () => {
-  return <CustomCalendar />;
-  //return <div>Welcome to the Planning Page</div>;
+interface PlanningViewProps {
+  onPgroupChange?: (pgroup: string) => void;
+  activePgroup: string;
+}
+
+const PlanningView: React.FC<PlanningViewProps> = ({ onPgroupChange, activePgroup }) => {
+  return <CustomCalendar
+    activePgroup={activePgroup}
+    onPgroupChange={onPgroupChange}
+  />;
 };

 export default PlanningView;
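PlanningView now expects the pgroup wiring from its parent and simply forwards it to the calendar. A minimal sketch of that wiring; the surrounding component, import path, and initial value are illustrative only:

// Hypothetical parent for PlanningView; "App", the import path, and the initial pgroup are placeholders.
import React, { useState } from 'react';
import PlanningView from './pages/PlanningView';

const App: React.FC = () => {
  const [activePgroup, setActivePgroup] = useState('p20001');

  return (
    <PlanningView
      activePgroup={activePgroup}
      onPgroupChange={(pgroup) => setActivePgroup(pgroup)} // keep the app-wide selection in sync
    />
  );
};

export default App;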
|
@ -1,23 +1,49 @@
|
|||||||
// components/ResultView.tsx
|
import React, { useEffect } from 'react';
|
||||||
|
import { useParams, useSearchParams, useNavigate } from 'react-router-dom';
|
||||||
import React from 'react';
|
|
||||||
import SampleTracker from '../components/SampleTracker';
|
import SampleTracker from '../components/SampleTracker';
|
||||||
import ResultGrid from '../components/ResultGrid';
|
import ResultGrid from '../components/ResultGrid';
|
||||||
|
|
||||||
interface ResultsViewProps {
|
interface ResultsViewProps {
|
||||||
activePgroup: string;
|
onPgroupChange?: (pgroup: string) => void; // Callback to notify about pgroup changes
|
||||||
|
currentPgroup: string; // Currently selected pgroup
|
||||||
}
|
}
|
||||||
|
|
||||||
const ResultsView: React.FC<ResultsViewProps> = ({activePgroup
|
const ResultsView: React.FC<ResultsViewProps> = ({ onPgroupChange, currentPgroup }) => {
|
||||||
}) => {
|
const { beamtimeId } = useParams();
|
||||||
|
const [searchParams] = useSearchParams();
|
||||||
|
const navigate = useNavigate();
|
||||||
|
|
||||||
|
// Get the active pgroup for the experiment from the query params.
|
||||||
|
const activePgroup = searchParams.get("pgroup") ?? ''; // Default to an empty string if missing
|
||||||
|
|
||||||
|
// Redirect if the selected pgroup does not match the beamtime's pgroup
|
||||||
|
useEffect(() => {
|
||||||
|
if (!currentPgroup || currentPgroup !== activePgroup) {
|
||||||
|
console.warn(
|
||||||
|
`Redirecting to BeamtimeOverview because selected pgroup (${currentPgroup || "undefined"}) does not match beamtime's pgroup (${activePgroup})`
|
||||||
|
);
|
||||||
|
navigate('/beamtime-overview'); // Redirect to BeamtimeOverview
|
||||||
|
}
|
||||||
|
}, [currentPgroup, activePgroup, navigate]);
|
||||||
|
|
||||||
|
// Notify parent about the selected pgroup (if needed)
|
||||||
|
useEffect(() => {
|
||||||
|
// Synchronize the pgroup when the component loads
|
||||||
|
if (onPgroupChange && activePgroup !== currentPgroup) {
|
||||||
|
onPgroupChange(activePgroup); // Update the selected pgroup
|
||||||
|
}
|
||||||
|
}, [onPgroupChange, activePgroup, currentPgroup]);
|
||||||
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div>
|
<div>
|
||||||
<h1>Results Page</h1>
|
<h1>Results Page</h1>
|
||||||
<SampleTracker activePgroup={activePgroup}/>
|
<h2>Results for Beamtime ID: {beamtimeId}</h2>
|
||||||
<ResultGrid activePgroup={activePgroup} />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
|
{/* Use the beamtimeId to filter or query specific results */}
|
||||||
|
<SampleTracker activePgroup={activePgroup} beamtimeId={beamtimeId} />
|
||||||
|
<ResultGrid activePgroup={activePgroup} beamtimeId={beamtimeId} />
|
||||||
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
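ResultsView now derives beamtimeId from the route parameters and the pgroup from the query string, so whatever route renders it must provide both. A sketch of a matching route and link, assuming a path of the form /results/:beamtimeId, which is not shown in this change:

// Hypothetical routing for the new ResultsView contract; the "/results/:beamtimeId" path and import path are assumptions.
import React from 'react';
import { Routes, Route } from 'react-router-dom';
import ResultsView from '../views/ResultsView';

const ResultsRoutes: React.FC<{ currentPgroup: string }> = ({ currentPgroup }) => (
  <Routes>
    <Route
      path="/results/:beamtimeId"
      element={<ResultsView currentPgroup={currentPgroup} />}
    />
  </Routes>
);

// A link that satisfies both useParams() and searchParams.get("pgroup"), e.g. with react-router's <Link>:
// <Link to={`/results/${beamtime.id}?pgroup=${encodeURIComponent(currentPgroup)}`}>Open results</Link>

export default ResultsRoutes;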
@@ -1,46 +1,17 @@
-.calendar-container {
-    width: 80%;
-    margin: 0 auto;
-}
-
-/* Styling each day cell */
-.fc-daygrid-day-frame {
-    position: relative; /* Ensure positioning for child elements */
-    border: 1px solid #e0e0e0; /* Grid cell border for better visibility */
-}
-
-/* Event styling */
-.fc-event {
-    border-radius: 3px; /* Rounded corners for events */
-    padding: 4px; /* Padding for events */
-    font-size: 12px; /* Font size for event text */
-    cursor: pointer; /* Pointer cursor for events */
-    box-sizing: border-box; /* Include padding in the width/height */
-}
-
-/* Selected event styling */
-.fc-event-selected {
-    border: 2px solid black; /* Border for selected events */
-}
-
-/* Optional: Add hover effect for events */
-.fc-event:hover {
-    background-color: #FF7043; /* Change color on hover */
-}
-
-.event-details {
-    margin-top: 20px;
-    padding: 15px;
-    border: 1px solid #ccc;
-    border-radius: 5px;
-    background-color: #f9f9f9;
-}
-
-.event-details h3 {
-    margin: 0 0 10px;
-}
-
-.event-details label {
-    display: block;
-    margin-bottom: 10px;
-}
+.fc-event-shift {
+    position: absolute !important; /* Enables proper alignment */
+    font-size: 12px; /* Text size for better clarity */
+    line-height: 1.2; /* Improve readability */
+    height: auto !important; /* Flexible height based on content */
+    min-height: 25px; /* Ensure adequate space vertically */
+    width: 28% !important; /* Prevent events from spanning full cell width */
+    border: 1px solid #555; /* Consistent event border */
+    border-radius: 4px; /* Rounded corners */
+    background-color: rgba(255, 255, 255, 0.9); /* Default background */
+    white-space: nowrap; /* Prevent text wrapping */
+    overflow: hidden; /* Hide overflowing content */
+    text-overflow: ellipsis; /* Show '...' for long titles */
+    display: flex; /* Align content vertically and horizontally */
+    justify-content: center; /* Center horizontal alignment */
+    align-items: center; /* Center vertical alignment */
+}
@@ -16,4 +16,6 @@ mysqlclient~=2.1.1
 python-multipart~=0.0.6
 uvicorn==0.23.1
 python-dotenv
 psycopg2-binary
+python-dateutil~=2.8.2
+urllib3~=2.2.1
File diff suppressed because one or more lines are too long