diff --git a/pyproject.toml b/pyproject.toml index ce146e14d..5c4cf385c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -115,6 +115,7 @@ TomographyMetadataContext = "murfey.client.contexts.tomo_metadata:TomographyMeta "data_collection" = "murfey.workflows.register_data_collection:run" "data_collection_group" = "murfey.workflows.register_data_collection_group:run" "experiment_type_update" = "murfey.workflows.register_experiment_type_update:run" +"fib.register_atlas" = "murfey.workflows.fib.register_atlas:run" "pato" = "murfey.workflows.notifications:notification_setup" "picked_particles" = "murfey.workflows.spa.picking:particles_picked" "picked_tomogram" = "murfey.workflows.tomo.picking:picked_tomogram" diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 453ceaece..587f67152 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -6,7 +6,6 @@ from datetime import datetime from pathlib import Path from typing import NamedTuple -from xml.etree import ElementTree as ET import xmltodict @@ -30,21 +29,6 @@ class MillingProgress(NamedTuple): timestamp: float -class ElectronSnapshotMetadata(NamedTuple): - slot_num: int | None # Which slot in the FIB-SEM it is from - image_num: int - image_dir: str # Partial path from EMproject.emxml parent to the image - status: str - x_len: float | None - y_len: float | None - z_len: float | None - x_center: float | None - y_center: float | None - z_center: float | None - extent: tuple[float, float, float, float] | None - rotation_angle: float | None - - def _number_from_name(name: str) -> int: """ In the AutoTEM and Maps workflows for the FIB, the sites and images are @@ -89,72 +73,6 @@ def _file_transferred_to( return destination -def _parse_electron_snapshot_metadata(xml_file: Path): - metadata_dict = {} - root = ET.parse(xml_file).getroot() - datasets = root.findall(".//Datasets/Dataset") - for dataset in datasets: - # Extract all string-based values - name, 
image_dir, status = [ - node.text - if ((node := dataset.find(node_path)) is not None and node.text is not None) - else "" - for node_path in ( - ".//Name", - ".//FinalImages", - ".//Status", - ) - ] - - # Extract all float values - cx, cy, cz, x_len, y_len, z_len, rotation_angle = [ - float(node.text) - if ((node := dataset.find(node_path)) is not None and node.text is not None) - else None - for node_path in ( - ".//BoxCenter/CenterX", - ".//BoxCenter/CenterY", - ".//BoxCenter/CenterZ", - ".//BoxSize/SizeX", - ".//BoxSize/SizeY", - ".//BoxSize/SizeZ", - ".//RotationAngle", - ) - ] - - # Calculate the extent of the image - extent = None - if ( - cx is not None - and cy is not None - and x_len is not None - and y_len is not None - ): - extent = ( - x_len - (cx / 2), - x_len + (cx / 2), - y_len - (cy / 2), - y_len - (cy / 2), - ) - - # Append metadata for current site to dict - metadata_dict[name] = ElectronSnapshotMetadata( - slot_num=None if cx is None else (1 if cx < 0 else 2), - image_num=_number_from_name(name), - status=status, - image_dir=image_dir, - x_len=x_len, - y_len=y_len, - z_len=z_len, - x_center=cx, - y_center=cy, - z_center=cz, - extent=extent, - rotation_angle=rotation_angle, - ) - return metadata_dict - - class FIBContext(Context): def __init__( self, @@ -168,9 +86,6 @@ def __init__( self._machine_config = machine_config self._milling: dict[int, list[MillingProgress]] = {} self._lamellae: dict[int, Lamella] = {} - self._electron_snapshots: dict[str, Path] = {} - self._electron_snapshot_metadata: dict[str, ElectronSnapshotMetadata] = {} - self._electron_snapshots_submitted: set[str] = set() def post_transfer( self, @@ -268,34 +183,11 @@ def post_transfer( # Maps # ----------------------------------------------------------------------------- elif self._acquisition_software == "maps": - # Electron snapshot metadata file - if transferred_file.name == "EMproject.emxml": - # Extract all "Electron Snapshot" metadata and store it - 
self._electron_snapshot_metadata = _parse_electron_snapshot_metadata( - transferred_file - ) - # If dataset hasn't been transferred, register it - for dataset_name in list(self._electron_snapshot_metadata.keys()): - if dataset_name not in self._electron_snapshots_submitted: - if dataset_name in self._electron_snapshots: - logger.info(f"Registering {dataset_name!r}") - - ## Workflow to trigger goes here - - # Clear old entry after triggering workflow - self._electron_snapshots_submitted.add(dataset_name) - with lock: - self._electron_snapshots.pop(dataset_name, None) - self._electron_snapshot_metadata.pop(dataset_name, None) - else: - logger.debug(f"Waiting for image for {dataset_name}") - # Electron snapshot image - elif ( + if ( + # Electron snapshot images are grid atlases "Electron Snapshot" in transferred_file.name and transferred_file.suffix in (".tif", ".tiff") ): - # Store file in Context memory - dataset_name = transferred_file.stem if not (source := _get_source(transferred_file, environment)): logger.warning(f"No source found for file {transferred_file}") return @@ -313,24 +205,34 @@ def post_transfer( f"File {transferred_file.name!r} not found on storage system" ) return - self._electron_snapshots[dataset_name] = destination_file - - if dataset_name not in self._electron_snapshots_submitted: - # If the metadata and image are both present, register dataset - if dataset_name in list(self._electron_snapshot_metadata.keys()): - logger.info(f"Registering {dataset_name!r}") - ## Workflow to trigger goes here + # Register image in database + self._register_atlas(destination_file, environment) + return - # Clear old entry after triggering workflow - self._electron_snapshots_submitted.add(dataset_name) - with lock: - self._electron_snapshots.pop(dataset_name, None) - self._electron_snapshot_metadata.pop(dataset_name, None) - else: - logger.debug(f"Waiting for metadata for {dataset_name}") # 
----------------------------------------------------------------------------- # Meteor # ----------------------------------------------------------------------------- elif self._acquisition_software == "meteor": pass + + def _register_atlas(self, file: Path, environment: MurfeyInstanceEnvironment): + """ + Constructs the URL and dictionary to be posted to the server, which then triggers + the processing of the electron snapshot image. + """ + + try: + capture_post( + base_url=str(environment.url.geturl()), + router_name="workflow_fib.router", + function_name="register_fib_atlas", + token=self._token, + data={"file": str(file)}, + session_id=environment.murfey_session, + ) + logger.info(f"Registering atlas image {file.name!r}") + return True + except Exception as e: + logger.error(f"Error encountered registering atlas image {file.name}:\n{e}") + return False diff --git a/src/murfey/server/api/workflow_fib.py b/src/murfey/server/api/workflow_fib.py new file mode 100644 index 000000000..5e21f4972 --- /dev/null +++ b/src/murfey/server/api/workflow_fib.py @@ -0,0 +1,45 @@ +import logging +from importlib.metadata import entry_points +from pathlib import Path + +from fastapi import APIRouter, Depends +from pydantic import BaseModel +from sqlmodel import Session + +from murfey.server.api.auth import validate_instrument_token +from murfey.server.murfey_db import murfey_db + +logger = logging.getLogger("murfey.server.api.workflow_fib") + +router = APIRouter( + prefix="/workflow/fib", + dependencies=[Depends(validate_instrument_token)], + tags=["Workflows: FIB milling"], +) + + +class FIBAtlasInfo(BaseModel): + file: Path | None = None + + +@router.post("/sessions/{session_id}/register_atlas") +def register_fib_atlas( + session_id: int, + fib_atlas_info: FIBAtlasInfo, + db: Session = murfey_db, +): + # See if the relevant workflow is available + if not ( + workflow_search := list( + entry_points(group="murfey.workflows", name="fib.register_atlas") + ) + ): + raise 
RuntimeError("Unable to find Murfey workflow to register FIB atlas") + workflow = workflow_search[0] + + # Run the workflow + workflow.load()( + session_id=session_id, + file=fib_atlas_info.file, + murfey_db=db, + ) diff --git a/src/murfey/server/main.py b/src/murfey/server/main.py index 613546bfd..5c951176e 100644 --- a/src/murfey/server/main.py +++ b/src/murfey/server/main.py @@ -26,6 +26,7 @@ import murfey.server.api.session_info import murfey.server.api.websocket import murfey.server.api.workflow +import murfey.server.api.workflow_fib from murfey.server import template_files from murfey.util.config import get_security_config @@ -97,6 +98,7 @@ class Settings(BaseSettings): app.include_router(murfey.server.api.workflow.spa_router) app.include_router(murfey.server.api.workflow.tomo_router) app.include_router(murfey.server.api.clem.router) +app.include_router(murfey.server.api.workflow_fib.router) app.include_router(murfey.server.api.prometheus.router) diff --git a/src/murfey/util/db.py b/src/murfey/util/db.py index 8f19d0427..0b8ecd5db 100644 --- a/src/murfey/util/db.py +++ b/src/murfey/util/db.py @@ -99,29 +99,27 @@ class ImagingSite(SQLModel, table=True): # type: ignore image_path: Optional[str] = Field(default=None) thumbnail_path: Optional[str] = Field(default=None) - # Link to Session table - session: Optional["Session"] = Relationship( - back_populates="imaging_sites" - ) # Many to one - session_id: Optional[int] = Field( - foreign_key="session.id", default=None, unique=False - ) - # Type of data (atlas/overview or grid square) data_type: Optional[str] = Field(default=None) # "atlas" or "grid_square" - # Link to data collection group - data_collection_group: Optional["DataCollectionGroup"] = Relationship( - back_populates="imaging_sites" - ) - dcg_id: Optional[int] = Field( - foreign_key="datacollectiongroup.dataCollectionGroupId", default=None - ) - dcg_name: Optional[str] = Field(default=None) + # Stage position (image centre) and rotation + pos_x: 
Optional[float] = Field(default=None) + pos_y: Optional[float] = Field(default=None) + pos_z: Optional[float] = Field(default=None) + rotation: Optional[float] = Field(default=None) + tilt_alpha: Optional[float] = Field(default=None) + tilt_beta: Optional[float] = Field(default=None) - # Link to grid squares - grid_square: Optional["GridSquare"] = Relationship(back_populates="imaging_sites") - grid_square_id: Optional[int] = Field(foreign_key="gridsquare.id", default=None) + # Field and depth of view + len_x: Optional[float] = Field(default=None) + len_y: Optional[float] = Field(default=None) + len_z: Optional[float] = Field(default=None) + + # Extent of the imaged area in real space + x0: Optional[float] = Field(default=None) + x1: Optional[float] = Field(default=None) + y0: Optional[float] = Field(default=None) + y1: Optional[float] = Field(default=None) # Shape and resolution information image_pixels_x: Optional[int] = Field(default=None) @@ -130,13 +128,9 @@ class ImagingSite(SQLModel, table=True): # type: ignore thumbnail_pixels_x: Optional[int] = Field(default=None) thumbnail_pixels_y: Optional[int] = Field(default=None) thumbnail_pixel_size: Optional[float] = Field(default=None) - units: Optional[str] = Field(default=None) - # Extent of the imaged area in real space - x0: Optional[float] = Field(default=None) - x1: Optional[float] = Field(default=None) - y0: Optional[float] = Field(default=None) - y1: Optional[float] = Field(default=None) + # Spatial units + units: Optional[str] = Field(default=None) # Colour channel-related fields number_of_members: Optional[int] = Field(default=None) @@ -150,6 +144,33 @@ class ImagingSite(SQLModel, table=True): # type: ignore collection_mode: Optional[str] = Field(default=None) composite_created: bool = False # Has a composite image been created? 
+ # ------------- + # Relationships + # ------------- + + # Session + session: Optional["Session"] = Relationship( + back_populates="imaging_sites" + ) # Many-to-one + session_id: Optional[int] = Field( + foreign_key="session.id", default=None, unique=False + ) + + # DataCollectionGroup + data_collection_group: Optional["DataCollectionGroup"] = Relationship( + back_populates="imaging_sites" + ) # Many-to-one + dcg_id: Optional[int] = Field( + foreign_key="datacollectiongroup.dataCollectionGroupId", default=None + ) + dcg_name: Optional[str] = Field(default=None) + + # GridSquare + grid_square: Optional["GridSquare"] = Relationship( + back_populates="imaging_sites" + ) # Many-to-one + grid_square_id: Optional[int] = Field(foreign_key="gridsquare.id", default=None) + """ TEM SESSION AND PROCESSING WORKFLOW diff --git a/src/murfey/util/route_manifest.yaml b/src/murfey/util/route_manifest.yaml index db1ea9fd1..cc1e8fb77 100644 --- a/src/murfey/util/route_manifest.yaml +++ b/src/murfey/util/route_manifest.yaml @@ -1423,3 +1423,11 @@ murfey.server.api.workflow.tomo_router: type: int methods: - POST +murfey.server.api.workflow_fib.router: + - path: /workflow/fib/sessions/{session_id}/register_atlas + function: register_fib_atlas + path_params: + - name: session_id + type: int + methods: + - POST diff --git a/src/murfey/workflows/fib/register_atlas.py b/src/murfey/workflows/fib/register_atlas.py new file mode 100644 index 000000000..22afa27f3 --- /dev/null +++ b/src/murfey/workflows/fib/register_atlas.py @@ -0,0 +1,11 @@ +from pathlib import Path + +from sqlmodel import Session + + +def run( + session_id: int, + file: Path, + murfey_db: Session, +): + pass diff --git a/tests/client/contexts/test_fib.py b/tests/client/contexts/test_fib.py index 035453a49..3e45f0b91 100644 --- a/tests/client/contexts/test_fib.py +++ b/tests/client/contexts/test_fib.py @@ -1,6 +1,4 @@ -import xml.etree.ElementTree as ET from pathlib import Path -from typing import Any from unittest.mock 
import MagicMock import pytest @@ -18,183 +16,19 @@ # ------------------------------------------------------------------------------------- -def create_fib_maps_dataset_element( - id: int, - name: str, - relative_path: str, - center_x: float, - center_y: float, - center_z: float, - size_x: float, - size_y: float, - size_z: float, - rotation_angle: float, - status: str, -): - # Create dataset node - dataset = ET.Element("Dataset") - # ID node - id_node = ET.Element("Id") - id_node.text = str(id) - dataset.append(id_node) - - # Name node - name_node = ET.Element("Name") - name_node.text = name - dataset.append(name_node) - - # Stage position node - box_center = ET.Element("BoxCenter") - for tag, value in ( - ("CenterX", center_x), - ("CenterY", center_y), - ("CenterZ", center_z), - ): - node = ET.Element(tag) - node.text = str(value) - box_center.append(node) - dataset.append(box_center) - - # Image size node - box_size = ET.Element("BoxSize") - for tag, value in ( - ("SizeX", size_x), - ("SizeY", size_y), - ("SizeZ", size_z), - ): - node = ET.Element(tag) - node.text = str(value) - box_size.append(node) - dataset.append(box_size) - - # Rotation angle - angle_node = ET.Element("RotationAngle") - angle_node.text = str(rotation_angle) - dataset.append(angle_node) - - # Relative path - image_path_node = ET.Element("FinalImages") - image_path_node.text = relative_path.replace("/", "\\") - dataset.append(image_path_node) - - # Status - status_node = ET.Element("Status") - status_node.text = status - dataset.append(status_node) - - return dataset - - -def create_fib_maps_xml_metadata( - project_name: str, - datasets: list[dict[str, Any]], -): - # Create root node - root = ET.Element("EMProject") - - # Project name node - project_name_node = ET.Element("ProjectName") - project_name_node.text = project_name - root.append(project_name_node) - - # Datasets node - datasets_node = ET.Element("Datasets") - for id, dataset in enumerate(datasets): - 
datasets_node.append(create_fib_maps_dataset_element(id, **dataset)) - root.append(datasets_node) - - return root - - -fib_maps_test_datasets = [ - { - "name": name, - "relative_path": relative_path, - "center_x": cx, - "center_y": cy, - "center_z": cz, - "size_x": sx, - "size_y": sy, - "size_z": sz, - "rotation_angle": ra, - "status": "Finished", - } - for (name, relative_path, cx, cy, cz, sx, sy, sz, ra) in ( - ( - "Electron Snapshot", - "LayersData/Layer/Electron Snapshot", - -0.002, - -0.004, - 0.00000008, - 0.0036, - 0.0024, - 0.0, - 3.1415926535897931, - ), - ( - "Electron Snapshot (2)", - "LayersData/Layer/Electron Snapshot (2)", - -0.002, - -0.004, - 0.00000008, - 0.0036, - 0.0024, - 0.0, - 3.1415926535897931, - ), - ( - "Electron Snapshot (3)", - "LayersData/Layer/Electron Snapshot (3)", - 0.002, - 0.004, - 0.00000008, - 0.0036, - 0.0024, - 0.0, - 3.1415926535897931, - ), - ( - "Electron Snapshot (4)", - "LayersData/Layer/Electron Snapshot (4)", - 0.002, - 0.004, - 0.00000008, - 0.0036, - 0.0024, - 0.0, - 3.1415926535897931, - ), - ) -] - - @pytest.fixture def visit_dir(tmp_path: Path): return tmp_path / "visit" @pytest.fixture -def fib_maps_metadata_file(visit_dir: Path): - metadata = create_fib_maps_xml_metadata( - "test-project", - fib_maps_test_datasets, - ) - tree = ET.ElementTree(metadata) - ET.indent(tree, space=" ") - save_path = visit_dir / "maps/visit/EMproject.emxml" - if not save_path.parent.exists(): - save_path.parent.mkdir(parents=True, exist_ok=True) - tree.write(save_path, encoding="utf-8") - return save_path - - -@pytest.fixture -def fib_maps_images(fib_maps_metadata_file: Path): +def fib_maps_images(visit_dir: Path): image_list = [] - for dataset in fib_maps_test_datasets: - name = str(dataset["name"]) - relative_path = str(dataset["relative_path"]) - file = fib_maps_metadata_file.parent / relative_path / f"{name}.tiff" + for i in range(4): + name = "Electron Snapshot" + if i > 0: + name += f" ({i})" + file = visit_dir / 
"maps/visit/LayersData/Layer" / f"{name}.tiff" if not file.exists(): file.parent.mkdir(parents=True, exist_ok=True) file.touch() @@ -229,7 +63,6 @@ def test_get_source( tmp_path: Path, visit_dir: Path, fib_maps_images: list[Path], - fib_maps_metadata_file: Path, ): # Mock the MurfeyInstanceEnvironment mock_environment = MagicMock() @@ -238,7 +71,7 @@ def test_get_source( tmp_path / "another_dir", ] # Check that the correct source directory is found - for file in [fib_maps_metadata_file, *fib_maps_images]: + for file in fib_maps_images: assert _get_source(file, mock_environment) == visit_dir @@ -246,7 +79,6 @@ def test_file_transferred_to( tmp_path: Path, visit_dir: Path, fib_maps_images: list[Path], - fib_maps_metadata_file: Path, ): # Mock the environment mock_environment = MagicMock() @@ -255,7 +87,7 @@ def test_file_transferred_to( # Iterate across the FIB files to compare against destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" - for file in [fib_maps_metadata_file, *fib_maps_images]: + for file in fib_maps_images: # Work out what the expected destination will be assert _file_transferred_to( environment=mock_environment, @@ -265,29 +97,36 @@ def test_file_transferred_to( ) == destination_dir / file.relative_to(visit_dir) -def test_parse_electron_snapshot_metadata(): - pass - - def test_fib_autotem_context(): pass -@pytest.mark.parametrize("metadata_first", ((False, True))) def test_fib_maps_context( mocker: MockerFixture, tmp_path: Path, - fib_maps_metadata_file: Path, + visit_dir: Path, fib_maps_images: list[Path], - metadata_first: bool, ): - # Mock out irrelevant functions - mocker.patch("murfey.client.contexts.fib._get_source", return_value=tmp_path) - mocker.patch( - "murfey.client.contexts.fib._file_transferred_to", side_effect=fib_maps_images - ) + # Mock the environment mock_environment = MagicMock() + # Create a list of destinations + destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" + destination_files = [ + 
destination_dir / file.relative_to(visit_dir) for file in fib_maps_images + ] + + # Mock the functions used in 'post_transfer' + mock_get_source = mocker.patch( + "murfey.client.contexts.fib._get_source", return_value=tmp_path + ) + mock_file_transferred_to = mocker.patch( + "murfey.client.contexts.fib._file_transferred_to", side_effect=destination_files + ) + mock_register_fib_atlas = mocker.patch.object( + FIBContext, "_register_atlas", return_value=True + ) + # Initialise the FIBContext basepath = tmp_path context = FIBContext( @@ -296,46 +135,17 @@ def test_fib_maps_context( machine_config={}, token="", ) - # Assert that its initial state is correct - assert not context._electron_snapshots - assert not context._electron_snapshot_metadata - assert not context._electron_snapshots_submitted - if metadata_first: - # Read the metadata first - context.post_transfer(fib_maps_metadata_file, mock_environment) - # Metadata field should now be populated - assert all( - name in context._electron_snapshot_metadata.keys() - for name in [image.stem for image in fib_maps_images] - ) - # Parse the images one-by-one - for image in fib_maps_images: - name = image.stem - context.post_transfer(image, mock_environment) - # Entries should now start being removed from 'metadata' and 'images' fields - assert ( - name not in context._electron_snapshots.keys() - and name not in context._electron_snapshot_metadata.keys() - and name in context._electron_snapshots_submitted - ) - else: - # Read in images first - for image in fib_maps_images: - name = image.stem - context.post_transfer(image, mock_environment) - assert ( - name in context._electron_snapshots.keys() - and name not in context._electron_snapshot_metadata.keys() - and name not in context._electron_snapshots_submitted - ) - # Read in the metadata - context.post_transfer(fib_maps_metadata_file, mock_environment) - assert all( - name in context._electron_snapshots_submitted - and name not in context._electron_snapshots.keys() - and 
name not in context._electron_snapshot_metadata.keys() - for name in [file.stem for file in fib_maps_images] + # Parse images one-by-one + for file in fib_maps_images: + context.post_transfer(file, environment=mock_environment) + assert mock_get_source.call_count == len(fib_maps_images) + assert mock_file_transferred_to.call_count == len(fib_maps_images) + assert mock_register_fib_atlas.call_count == len(fib_maps_images) + for dst in destination_files: + mock_register_fib_atlas.assert_any_call( + dst, + mock_environment, ) diff --git a/tests/server/api/test_workflow_fib.py b/tests/server/api/test_workflow_fib.py new file mode 100644 index 000000000..e72deb57e --- /dev/null +++ b/tests/server/api/test_workflow_fib.py @@ -0,0 +1,54 @@ +from pathlib import Path +from unittest.mock import MagicMock + +import pytest +from pytest_mock import MockerFixture + +from murfey.server.api.workflow_fib import FIBAtlasInfo, register_fib_atlas + + +def test_register_fib_atlas( + mocker: MockerFixture, + tmp_path: Path, +): + # Mock the database instance + mock_db = MagicMock() + + # Patch out the entry point being called + mock_register_fib_atlas = mocker.patch("murfey.workflows.fib.register_atlas.run") + + session_id = 1 + fib_atlas_info = FIBAtlasInfo(**{"file": str(tmp_path / "dummy")}) + + # Run the function and check that the expected calls were made + register_fib_atlas( + session_id=session_id, + fib_atlas_info=fib_atlas_info, + db=mock_db, + ) + mock_register_fib_atlas.assert_called_once_with( + session_id=session_id, + file=fib_atlas_info.file, + murfey_db=mock_db, + ) + + +def test_register_fib_atlas_no_entry_point( + mocker: MockerFixture, + tmp_path: Path, +): + # Mock out entry_points to return an empty list + mocker.patch("murfey.server.api.workflow_fib.entry_points", return_value=[]) + + # Mock the database instance + mock_db = MagicMock() + + fib_atlas_info = FIBAtlasInfo(**{"file": str(tmp_path / "dummy")}) + + # Run the function and check that the expected error is raised + with
pytest.raises(RuntimeError): + register_fib_atlas( + session_id=1, + fib_atlas_info=fib_atlas_info, + db=mock_db, + )