Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,7 @@ TomographyMetadataContext = "murfey.client.contexts.tomo_metadata:TomographyMeta
"data_collection" = "murfey.workflows.register_data_collection:run"
"data_collection_group" = "murfey.workflows.register_data_collection_group:run"
"experiment_type_update" = "murfey.workflows.register_experiment_type_update:run"
"fib.register_atlas" = "murfey.workflows.fib.register_atlas:run"
"pato" = "murfey.workflows.notifications:notification_setup"
"picked_particles" = "murfey.workflows.spa.picking:particles_picked"
"picked_tomogram" = "murfey.workflows.tomo.picking:picked_tomogram"
Expand Down
150 changes: 26 additions & 124 deletions src/murfey/client/contexts/fib.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from datetime import datetime
from pathlib import Path
from typing import NamedTuple
from xml.etree import ElementTree as ET

import xmltodict

Expand All @@ -30,21 +29,6 @@ class MillingProgress(NamedTuple):
timestamp: float


class ElectronSnapshotMetadata(NamedTuple):
slot_num: int | None # Which slot in the FIB-SEM it is from
image_num: int
image_dir: str # Partial path from EMproject.emxml parent to the image
status: str
x_len: float | None
y_len: float | None
z_len: float | None
x_center: float | None
y_center: float | None
z_center: float | None
extent: tuple[float, float, float, float] | None
rotation_angle: float | None


def _number_from_name(name: str) -> int:
"""
In the AutoTEM and Maps workflows for the FIB, the sites and images are
Expand Down Expand Up @@ -89,72 +73,6 @@ def _file_transferred_to(
return destination


def _parse_electron_snapshot_metadata(xml_file: Path):
metadata_dict = {}
root = ET.parse(xml_file).getroot()
datasets = root.findall(".//Datasets/Dataset")
for dataset in datasets:
# Extract all string-based values
name, image_dir, status = [
node.text
if ((node := dataset.find(node_path)) is not None and node.text is not None)
else ""
for node_path in (
".//Name",
".//FinalImages",
".//Status",
)
]

# Extract all float values
cx, cy, cz, x_len, y_len, z_len, rotation_angle = [
float(node.text)
if ((node := dataset.find(node_path)) is not None and node.text is not None)
else None
for node_path in (
".//BoxCenter/CenterX",
".//BoxCenter/CenterY",
".//BoxCenter/CenterZ",
".//BoxSize/SizeX",
".//BoxSize/SizeY",
".//BoxSize/SizeZ",
".//RotationAngle",
)
]

# Calculate the extent of the image
extent = None
if (
cx is not None
and cy is not None
and x_len is not None
and y_len is not None
):
extent = (
x_len - (cx / 2),
x_len + (cx / 2),
y_len - (cy / 2),
y_len - (cy / 2),
)

# Append metadata for current site to dict
metadata_dict[name] = ElectronSnapshotMetadata(
slot_num=None if cx is None else (1 if cx < 0 else 2),
image_num=_number_from_name(name),
status=status,
image_dir=image_dir,
x_len=x_len,
y_len=y_len,
z_len=z_len,
x_center=cx,
y_center=cy,
z_center=cz,
extent=extent,
rotation_angle=rotation_angle,
)
return metadata_dict


class FIBContext(Context):
def __init__(
self,
Expand All @@ -168,9 +86,6 @@ def __init__(
self._machine_config = machine_config
self._milling: dict[int, list[MillingProgress]] = {}
self._lamellae: dict[int, Lamella] = {}
self._electron_snapshots: dict[str, Path] = {}
self._electron_snapshot_metadata: dict[str, ElectronSnapshotMetadata] = {}
self._electron_snapshots_submitted: set[str] = set()

def post_transfer(
self,
Expand Down Expand Up @@ -268,34 +183,11 @@ def post_transfer(
# Maps
# -----------------------------------------------------------------------------
elif self._acquisition_software == "maps":
# Electron snapshot metadata file
if transferred_file.name == "EMproject.emxml":
# Extract all "Electron Snapshot" metadata and store it
self._electron_snapshot_metadata = _parse_electron_snapshot_metadata(
transferred_file
)
# If dataset hasn't been transferred, register it
for dataset_name in list(self._electron_snapshot_metadata.keys()):
if dataset_name not in self._electron_snapshots_submitted:
if dataset_name in self._electron_snapshots:
logger.info(f"Registering {dataset_name!r}")

## Workflow to trigger goes here

# Clear old entry after triggering workflow
self._electron_snapshots_submitted.add(dataset_name)
with lock:
self._electron_snapshots.pop(dataset_name, None)
self._electron_snapshot_metadata.pop(dataset_name, None)
else:
logger.debug(f"Waiting for image for {dataset_name}")
# Electron snapshot image
elif (
if (
# Electron snapshot images are grid atlases
"Electron Snapshot" in transferred_file.name
and transferred_file.suffix in (".tif", ".tiff")
):
# Store file in Context memory
dataset_name = transferred_file.stem
if not (source := _get_source(transferred_file, environment)):
logger.warning(f"No source found for file {transferred_file}")
return
Expand All @@ -313,24 +205,34 @@ def post_transfer(
f"File {transferred_file.name!r} not found on storage system"
)
return
self._electron_snapshots[dataset_name] = destination_file

if dataset_name not in self._electron_snapshots_submitted:
# If the metadata and image are both present, register dataset
if dataset_name in list(self._electron_snapshot_metadata.keys()):
logger.info(f"Registering {dataset_name!r}")

## Workflow to trigger goes here
# Register image in database
self._register_atlas(destination_file, environment)
return

# Clear old entry after triggering workflow
self._electron_snapshots_submitted.add(dataset_name)
with lock:
self._electron_snapshots.pop(dataset_name, None)
self._electron_snapshot_metadata.pop(dataset_name, None)
else:
logger.debug(f"Waiting for metadata for {dataset_name}")
# -----------------------------------------------------------------------------
# Meteor
# -----------------------------------------------------------------------------
elif self._acquisition_software == "meteor":
pass

def _register_atlas(
    self, file: Path, environment: MurfeyInstanceEnvironment
) -> bool:
    """
    Posts the path of a transferred atlas image to the server endpoint
    ('workflow_fib.router' / 'register_fib_atlas'), which triggers the
    registration and processing of the electron snapshot image.

    Returns True if the request was dispatched without error, False otherwise.
    """
    try:
        logger.info(f"Registering atlas image {file.name!r}")
        capture_post(
            # ParseResult.geturl() already returns a str; no extra cast needed
            base_url=environment.url.geturl(),
            router_name="workflow_fib.router",
            function_name="register_fib_atlas",
            token=self._token,  # presumably set by the Context base class — TODO confirm
            data={"file": str(file)},
            session_id=environment.murfey_session,
        )
        return True
    except Exception:
        # Broad catch so a failed registration never aborts the transfer loop;
        # logger.exception records the full traceback, not just str(e)
        logger.exception(f"Error encountered registering atlas image {file.name}")
        return False
45 changes: 45 additions & 0 deletions src/murfey/server/api/workflow_fib.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import logging
from importlib.metadata import entry_points
from pathlib import Path

from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlmodel import Session

from murfey.server.api.auth import validate_instrument_token
from murfey.server.murfey_db import murfey_db

logger = logging.getLogger("murfey.server.api.workflow_fib")

router = APIRouter(
prefix="/workflow/fib",
dependencies=[Depends(validate_instrument_token)],
tags=["Workflows: FIB milling"],
)


class FIBAtlasInfo(BaseModel):
    """Request body for the FIB atlas registration endpoint."""

    # Path to the transferred atlas image on the storage system;
    # optional — defaults to None when omitted from the request
    file: Path | None = None


@router.post("/sessions/{session_id}/register_atlas")
def register_fib_atlas(
    session_id: int,
    fib_atlas_info: FIBAtlasInfo,
    db: Session = murfey_db,
):
    """
    Looks up and runs the 'fib.register_atlas' workflow, which registers the
    FIB atlas image named in the request body against the given session.

    Responds with a 500 error if no installed package provides the workflow
    entry point.
    """
    # See if the relevant workflow is available; plugins expose it via the
    # 'murfey.workflows' entry point group (see pyproject.toml)
    if not (
        workflow_search := list(
            entry_points(group="murfey.workflows", name="fib.register_atlas")
        )
    ):
        # HTTPException gives the client a structured error body instead of
        # the opaque 500 produced by a bare RuntimeError
        raise HTTPException(
            status_code=500,
            detail="Unable to find Murfey workflow to register FIB atlas",
        )
    workflow = workflow_search[0]

    # Run the workflow
    workflow.load()(
        session_id=session_id,
        file=fib_atlas_info.file,
        murfey_db=db,
    )
2 changes: 2 additions & 0 deletions src/murfey/server/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import murfey.server.api.session_info
import murfey.server.api.websocket
import murfey.server.api.workflow
import murfey.server.api.workflow_fib
from murfey.server import template_files
from murfey.util.config import get_security_config

Expand Down Expand Up @@ -97,6 +98,7 @@ class Settings(BaseSettings):
app.include_router(murfey.server.api.workflow.spa_router)
app.include_router(murfey.server.api.workflow.tomo_router)
app.include_router(murfey.server.api.clem.router)
app.include_router(murfey.server.api.workflow_fib.router)

app.include_router(murfey.server.api.prometheus.router)

Expand Down
8 changes: 8 additions & 0 deletions src/murfey/util/route_manifest.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -1423,3 +1423,11 @@ murfey.server.api.workflow.tomo_router:
type: int
methods:
- POST
murfey.server.api.workflow_fib.router:
- path: /workflow/fib/sessions/{session_id}/register_atlas
function: register_fib_atlas
path_params:
- name: session_id
type: int
methods:
- POST
11 changes: 11 additions & 0 deletions src/murfey/workflows/fib/register_atlas.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from pathlib import Path

from sqlmodel import Session


def run(
    session_id: int,
    file: Path,
    murfey_db: Session,
):
    """
    Entry point for the 'fib.register_atlas' Murfey workflow.

    Currently a placeholder: registration of the FIB atlas image `file`
    against session `session_id` in the Murfey database is not yet
    implemented.
    """
    pass
Loading
Loading