Coverage for /opt/conda/envs/apienv/lib/python3.10/site-packages/daiquiri/core/components/imageviewer/__init__.py: 37%
677 statements
« prev ^ index » next coverage.py v7.6.10, created at 2025-02-06 02:13 +0000
« prev ^ index » next coverage.py v7.6.10, created at 2025-02-06 02:13 +0000
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3import json
4import time
5import os
6import pprint
7from datetime import datetime
8from typing import Any, Dict, List
9import gevent
10from marshmallow import fields
11from flask import g
12import numpy
13from PIL import Image
15from daiquiri.core import marshal, require_control
16from daiquiri.core.logging import log
17from daiquiri.core.components import (
18 Component,
19 ComponentResource,
20 actor,
21 ComponentActorKilled,
22)
23from daiquiri.core.schema import ErrorSchema, MessageSchema
24from daiquiri.core.schema.components.imageviewer import (
25 ImageSource,
26 SourceSettings,
27 MapAdditionalSchema,
28 MapSettings,
29 MoveToReferenceSchema,
30 SelectMatrixSchema,
31 ExportReferenceSchema,
32)
33from daiquiri.core.schema.metadata import paginated
34from daiquiri.core.components.dcutilsmixin import DCUtilsMixin
35from daiquiri.core.components.imageviewer.source import Source
36from daiquiri.core.components.imageviewer.annotate import AnnotateImage
37from daiquiri.core.components.imageviewer.transform import (
38 calculate_transform_matrix,
39 export_reference_to_sampleimage,
40)
41from daiquiri.core.hardware.abstract.scansource import ScanSource
43import logging
# Module-level logger and pretty-printer shared by this component.
logger = logging.getLogger(__name__)
pp = pprint.PrettyPrinter()

# For large mosaics need to disable pixel check
# https://stackoverflow.com/questions/51152059/pillow-in-python-wont-let-me-open-image-exceeds-limit
Image.MAX_IMAGE_PIXELS = None
class SourcesResource(ComponentResource):
    """List the image/video sources available in the 2d viewer."""

    @marshal(out=[[200, paginated(ImageSource), "A list of image/video sources"]])
    def get(self, **kwargs):
        """Get a list of image sources defined in the current 2d viewer"""
        sources = self._parent.get_sources()
        return sources
class SourcesSettingsResource(ComponentResource):
    """Read and update the settings of the origin-defining image source."""

    def _settings(self):
        # GET and PATCH previously duplicated this payload; build it in
        # one place so the two responses cannot drift apart.
        source = self._parent.origin_defining_source
        return {
            "has_fine": source.has_fine,
            "fine_fixed": source.fine_fixed,
            "coarse_fixed": source.coarse_fixed,
            "config": source.config,
        }

    @marshal(out=[[200, SourceSettings(), "Source Settings"]])
    def get(self):
        """Get the current origin source settings."""
        return self._settings()

    @marshal(
        inp=SourceSettings,
        out=[
            [200, SourceSettings(), "Updated source settings"],
            [400, ErrorSchema(), "Could not update source settings"],
        ],
    )
    def patch(self, fine_fixed: bool = None, coarse_fixed: bool = None, **kwargs):
        """Update the fine/coarse fixed flags and return the new settings."""
        if fine_fixed is not None:
            self._parent.origin_defining_source.fine_fixed = fine_fixed
        if coarse_fixed is not None:
            self._parent.origin_defining_source.coarse_fixed = coarse_fixed

        return self._settings()
class SourceImageResource(ComponentResource):
    """Capture and persist an image from the current source."""

    @marshal(
        inp={
            "sampleid": fields.Int(
                metadata={"description": "The sampleid"}, required=True
            ),
            "subsampleid": fields.Int(
                metadata={"description": "Optionally a subsampleid"}
            ),
        },
        out=[
            [200, MessageSchema(), "Source image created"],
            [400, ErrorSchema(), "Could not create source image"],
        ],
    )
    def post(self, **kwargs):
        """Capture an image from a source"""
        try:
            saved = self._parent.save_sample_image(**kwargs)
        except Exception as e:
            logger.exception("Could not save image")
            log.get("user").exception("Could not save source image", type="hardware")
            return {"error": f"Could not create source image: {str(e)}"}, 400

        if not saved:
            return {"error": "Could not create source image"}, 400
        return {"message": "Source image created"}, 200
class GenerateMapsResource(ComponentResource):
    """Trigger map generation for a subsample."""

    @marshal(
        inp={
            "datacollectionid": fields.Int(
                metadata={"description": "Optionally a datacollectionid"},
                required=False,
            ),
        },
        out=[
            [200, MessageSchema(), "Maps generated"],
            [400, ErrorSchema(), "Could not create maps"],
        ],
    )
    def post(self, subsampleid, **kwargs):
        """Generate a new map for a subsampleid"""
        dcid = kwargs.get("datacollectionid", None)
        generated = self._parent.generate_maps(subsampleid, datacollectionid=dcid)
        if not generated:
            return {"error": "Could not create maps"}, 400
        return {"message": "Maps created"}, 200
class CreateMapAdditionalResource(ComponentResource):
    """Create maps derived from additional scan scalars."""

    @marshal(
        inp=MapAdditionalSchema,
        out=[
            [200, MessageSchema(), "Map generated"],
            [400, ErrorSchema(), "Could not create map"],
        ],
    )
    def post(self, **kwargs):
        """Generate a new map from additional scalars"""
        created = self._parent.generate_additional_map(**kwargs)
        if not created:
            return {"error": "Could not create additional map"}, 400
        return {"message": "Map created"}, 200
class MoveResource(ComponentResource):
    """Move the stage so a given coordinate sits at the origin marker."""

    @require_control
    @marshal(
        inp={
            "x": fields.Float(required=True, metadata={"title": "X Position"}),
            "y": fields.Float(required=True, metadata={"title": "Y Position"}),
        },
        out=[
            [200, MessageSchema(), "Move to position ok"],
            [400, ErrorSchema(), "Could not move to position"],
        ],
    )
    def post(self, **kwargs):
        """Move the cursor position to the current origin marking position"""
        try:
            self._parent.move(kwargs)
        except Exception as e:
            message = f"Couldn't move to position: {str(e)}"
            log.get("user").error(message, type="hardware")
            return {"error": message}, 400
        return {"message": "ok"}, 200
class MoveToResource(ComponentResource):
    """Move a subsample to the origin marking position."""

    @require_control
    @marshal(
        out=[
            [200, MessageSchema(), "Moved to subsample"],
            [400, ErrorSchema(), "Could not move to subsample"],
        ],
    )
    def post(self, subsampleid, **kwargs):
        """Move the specified subsample to the current origin marking position"""
        try:
            self._parent.move_to(subsampleid)
        except Exception as e:
            message = f"Couldn't move to subsample `{subsampleid}`: {str(e)}"
            log.get("user").error(message, type="hardware")
            return {"error": message}, 400
        return {"message": "ok"}, 200
class MoveToReferenceResource(ComponentResource):
    """Move to a position expressed in a reference image's coordinate space."""

    @require_control
    @marshal(
        inp={
            "x": fields.Float(required=True, metadata={"title": "X Position"}),
            "y": fields.Float(required=True, metadata={"title": "Y Position"}),
            "execute": fields.Bool(),
        },
        out=[
            [200, MoveToReferenceSchema(), "Moved to position"],
            [400, ErrorSchema(), "Could not move to position"],
        ],
    )
    def post(self, **kwargs):
        """Move the cursor position to a position from a reference image

        When `execute` is false the transformed positions are returned
        without moving the hardware.
        """
        try:
            execute = kwargs.pop("execute", True)
            positions = self._parent.move_to_reference(kwargs, execute=execute)
            # Was a stray debug `print` to stdout; route through the logger.
            logger.debug("Move to reference positions: %s", positions)
            return {"moveid": f"move{time.time()}", "positions": positions}, 200
        except Exception as e:
            logger.exception("Couldnt move to reference position")
            return {"error": str(e)}, 400
class SelectReferenceMatrixResource(ComponentResource):
    """Select a sample action and compute its transformation matrix."""

    @require_control
    @marshal(
        inp={
            "sampleactionid": fields.Int(
                required=True, metadata={"title": "Sample Action"}
            ),
        },
        out=[
            [200, SelectMatrixSchema(), "Calculated transformation matrix"],
            [400, ErrorSchema(), "Could not calculate transformation matrix"],
        ],
    )
    def post(self, sampleactionid, **kwargs):
        """Select a sampleaction and calculate transformation matrix"""
        try:
            self._parent.select_reference_matrix(sampleactionid)
        except Exception as e:
            logger.exception("Couldnt select reference matrix")
            return {"error": str(e)}, 400
        return {"matrixid": f"matrix{time.time()}"}, 200
class ExportReferenceResource(ComponentResource):
    """Export a reference image into the sample image coordinate space."""

    @require_control
    @marshal(
        inp=ExportReferenceSchema,
        out=[
            [200, MessageSchema(), "Exported reference to sample image"],
            [400, ErrorSchema(), "Could not export reference to sample image"],
        ],
    )
    def post(self, sampleactionid, crop=None):
        """Export a reference image to a sample image

        Transforms the image into the current 2dview coordinate space
        using the selected transformation matrix
        """
        try:
            self._parent.export_reference_to_sampleimage(sampleactionid, crop=crop)
        except Exception as e:
            logger.exception("Could not export reference to sample image")
            return {"error": str(e)}, 400
        return {"message": "Exported reference to sample image"}, 200
class MosaicResource(ComponentResource):
    # Launches the "mosaic" actor, which tiles a region with camera images.
    @require_control
    @actor("mosaic", enqueue=False, preprocess=True)
    def post(self, **kwargs):
        """Create a tiled mosaic actor"""
        pass

    def preprocess(self, **kwargs):
        """Validate and enrich the actor arguments before the mosaic runs.

        Converts the two corner points into absolute positions, checks the
        sample exists, and injects the session id, a tile-saving callback
        and the origin-defining camera device.

        Raises:
            AttributeError: If the sample does not exist.
        """
        kwargs["absol"] = self._parent.get_absolute_fp(
            {"x": kwargs["x1"], "y": kwargs["y1"]},
            {"x": kwargs["x2"], "y": kwargs["y2"]},
        )

        sample = self._metadata.get_samples(sampleid=kwargs["sampleid"])
        if not sample:
            raise AttributeError(f"No such sample {kwargs['sampleid']}")

        sessionid = g.blsession.get("sessionid")

        def save_image(x, y):
            # Callback used by the actor to capture one tile at grid (x, y).
            return self._parent.save_image(
                sessionid=sessionid,
                sampleid=kwargs["sampleid"],
                file_prefix=f"mosaic_{x}_{y}_",
            )

        kwargs["sessionid"] = sessionid
        kwargs["save"] = save_image
        kwargs["camera"] = self._parent.origin_defining_source.device

        return kwargs
class UploadImage(ComponentResource):
    """Receive a base64 encoded image and hand it to the upload actor."""

    @marshal(
        inp={
            "image": fields.Str(
                required=True, metadata={"description": "Base64 encoded image"}
            ),
            "sampleid": fields.Int(
                required=True, metadata={"description": "Sample this image belongs to"}
            ),
        },
        out=[
            [200, MessageSchema(), "Image uploaded"],
            [400, ErrorSchema(), "Could not upload image"],
        ],
    )
    def post(self, **kwargs):
        """Upload an image and send it to an actor"""
        if self._parent.upload_image(**kwargs):
            return {"message": "Image uploaded"}, 200
        return {"error": "Could not upload image"}, 400
class AutoFocusImageResource(ComponentResource):
    """Launch the autofocus actor on the origin-defining camera."""

    @require_control
    @actor("autofocus", enqueue=False, preprocess=True)
    def post(self, **kwargs):
        """Autofocus the sample image"""
        pass

    def preprocess(self, **kwargs):
        """Inject the camera device and z-motor parameters for the actor."""
        source = self._parent.origin_defining_source
        kwargs["camera"] = source.device
        kwargs["z_increment"] = source._get_from_config("motor_z_autofocus_increment")
        kwargs["z_iterations"] = source._get_from_config("motor_z_autofocus_iterations")
        kwargs["z_motor"] = source._get_hwobj_from_config("motor_z")

        return kwargs
class ExportSubSamplesResource(ComponentResource):
    """Export a selection of subsamples to a json file on disk."""

    @marshal(
        inp={
            "subsampleids": fields.List(
                fields.Int(),
                required=True,
                metadata={"description": "A list of subsamples to export"},
            )
        },
        out=[
            [200, MessageSchema(), "Subsamples exported"],
            [400, ErrorSchema(), "Could not export subsamples"],
        ],
    )
    def post(self, **kwargs):
        """Export the selected subsamples to json"""
        try:
            target_dir = self._parent.export_subsamples(kwargs["subsampleids"])
            message = f"Sub samples exported to '{target_dir}'"
            log.get("user").info(message, type="actor")
        except Exception as e:
            return {"error": f"Could not export subsamples: {str(e)}"}, 400
        return {"message": message}, 200
class MapSettingsResource(ComponentResource):
    """Read and update the automatic map-generation settings."""

    def _settings(self):
        # GET and PATCH previously duplicated this payload; build it in
        # one place so the two responses cannot drift apart.
        return {
            "during_scan": self._parent._generate_during_scan,
            "scalar_maps": self._parent._scalar_maps,
        }

    @marshal(out=[[200, MapSettings(), "Map Settings"]])
    def get(self):
        """Get the current map generation settings."""
        return self._settings()

    @marshal(
        inp=MapSettings,
        out=[
            [200, MapSettings(), "Updated map settings"],
            [400, ErrorSchema(), "Could not update map settings"],
        ],
    )
    def patch(self, **kwargs):
        """Update during-scan regeneration and/or the automatic scalar maps."""
        if kwargs.get("during_scan") is not None:
            self._parent._generate_during_scan = kwargs["during_scan"]

        if kwargs.get("scalar_maps") is not None:
            self._parent._scalar_maps = kwargs["scalar_maps"]

        return self._settings(), 200
class ReferenceImageResource(ComponentResource):
    """Launch the reference image import actor."""

    @require_control
    @actor("reference", enqueue=False, preprocess=True)
    def post(self, **kwargs):
        """Import a reference image of the sample"""
        pass

    def preprocess(self, **kwargs):
        """Check the sample exists and attach the current session id."""
        if "sampleid" not in kwargs:
            raise AttributeError("No sample provided")

        if not self._metadata.get_samples(sampleid=kwargs["sampleid"]):
            raise AttributeError(f"No such sample {kwargs['sampleid']}")

        kwargs["sessionid"] = g.blsession.get("sessionid")
        return kwargs
class Imageviewer(Component, DCUtilsMixin):
    """2d image viewer component: sources, maps, moves, mosaics, references."""

    # Names of actors this component can spawn; scan actor names are
    # appended dynamically in `_generate_scan_actors`.
    _actors = [
        "createmap",
        "createadditional",
        "mosaic",
        "move",
        "upload_canvas",
        "autofocus",
        "export",
        "reference",
    ]
    # Config keys exported to clients.
    _config_export = ["options", "scantypes", "upload_canvas"]
    def setup(self):
        """Initialise component state, routes, sources and the watcher greenlet."""
        self._scan_actors = []  # names of dynamically generated scan actors
        self._map_actors = []  # actor ids of currently running map generators
        self._in_generate = False  # suppresses progress events mid-generate
        self._generate_during_scan = self._config.get("generate_maps_during_scan", True)
        self._scalar_maps = self._config.get("automatic_scalar_maps", [])

        self.register_route(SourcesResource, "/sources")
        self.register_route(SourcesSettingsResource, "/sources/origin")
        self.register_route(SourceImageResource, "/sources/image")
        self.register_route(MapSettingsResource, "/maps")
        self.register_route(GenerateMapsResource, "/maps/generate/<int:subsampleid>")
        self.register_route(CreateMapAdditionalResource, "/maps/additional")
        self.register_route(MoveResource, "/move")
        self.register_route(MoveToReferenceResource, "/move/reference")
        self.register_route(SelectReferenceMatrixResource, "/move/reference/matrix")
        self.register_route(MoveToResource, "/move/<int:subsampleid>")
        self.register_route(UploadImage, "/image/<int:sampleid>")
        self.register_route(ExportSubSamplesResource, "/export")
        self.register_actor_route(MosaicResource, "/mosaic")
        self.register_actor_route(ReferenceImageResource, "/reference")
        self.register_actor_route(ExportReferenceResource, "/reference/export")
        self.register_actor_route(AutoFocusImageResource, "/sources/autofocus")
        self._generate_scan_actors()

        # One `Source` per configured camera/video source; ids are 1-based.
        self._sources: List[Source] = []
        for i, src in enumerate(self._config["sources"]):
            self._sources.append(
                Source(
                    src,
                    i + 1,
                    self._hardware,
                    self.emit,
                    config_file=self._config.resource,
                )
            )

        self._create_maps = self._config.get("createmaps", {})

        # Background greenlet that periodically regenerates maps while
        # scans are running (see `check_running_actors`).
        self._check_running_actors = True
        self._check_actor_thread = gevent.spawn(self.check_running_actors)

        # Reference-image transform, set via SelectReferenceMatrixResource.
        self._reference_matrix = None
        self._reference_inverse_matrix = None
481 def reload(self):
482 self._generate_scan_actors()
483 for i, src in enumerate(self._config["sources"]):
484 if i < len(self._sources):
485 self._sources[i].update_config(src)
486 else:
487 self._sources.append(
488 Source(
489 src,
490 i + 1,
491 self._hardware,
492 self.emit,
493 config_file=self._config.resource,
494 )
495 )
    def _generate_scan_actors(self):
        """Dynamically generate scan actor resources"""

        def post(self, **kwargs):
            # Body supplied by the `actor` decorator at registration time.
            pass

        def preprocess(self, **kwargs):
            """Validate and enrich the scan actor arguments.

            Resolves the subsample and its parent sample, queues the
            subsample in the database, and injects the callbacks the scan
            actor needs (datacollection updates, map generation, etc.).

            Raises:
                AttributeError: If the subsample does not exist.
            """
            subsample = self._parent._metadata.get_subsamples(kwargs["subsampleid"])
            if not subsample:
                raise AttributeError(f"No such subsample {kwargs['subsampleid']}")
            kwargs["sampleid"] = subsample["sampleid"]
            kwargs["sample"] = subsample["sample"]
            sample = self._parent._metadata.get_samples(kwargs["sampleid"])
            kwargs["extrametadata"] = {
                **(sample["extrametadata"] if sample["extrametadata"] else {}),
                "subsample": subsample["extrametadata"],
            }
            kwargs["sessionid"] = g.blsession.get("sessionid")
            # Check whether position can be reached before marking the subsample queued
            # as this can raise
            absol = self._parent.get_absolute(kwargs["subsampleid"])
            (
                kwargs["containerqueuesampleid"],
                kwargs["datacollectionplanid"],
            ) = self._parent._metadata.queue_subsample(
                kwargs["subsampleid"], scanparameters=kwargs
            )
            kwargs["absol"] = absol
            kwargs["before_scan_starts"] = self._parent.before_scan_starts
            kwargs["update_datacollection"] = self._parent.update_datacollection
            kwargs["next_datacollection"] = self._parent.next_datacollection
            kwargs["generate_maps"] = self._parent.generate_maps
            kwargs["open_attachment"] = self._parent._open_dc_attachment
            kwargs["add_scanqualityindicators"] = self._parent.add_scanqualityindicators
            kwargs["scans"] = self._parent.get_component("scans")
            kwargs["beamsize"] = self._parent.beamsize

            def get_rois():
                # Deferred so ROIs reflect the database state when the scan runs.
                return {
                    "rois": self._metadata.get_xrf_map_rois(
                        sampleid=kwargs["sampleid"],
                        no_context=True,
                    )["rows"],
                    "conversion": self._parent.get_component("scans")._config["mca"][
                        "conversion"
                    ],
                }

            kwargs["get_rois"] = get_rois

            kwargs["enqueue"] = kwargs.get("enqueue", True)
            return kwargs

        # Build one ComponentResource subclass per configured scan type and
        # register it under /scan/<scanname>.
        for key, scans in self._config.get("scantypes", {}).items():
            for scanname in scans:
                if scanname in self._actors:
                    continue

                self._actors.append(scanname)
                self._scan_actors.append(scanname)
                act_res = type(
                    scanname,
                    (ComponentResource,),
                    {
                        "post": require_control(actor(scanname, preprocess=True)(post)),
                        "preprocess": preprocess,
                    },
                )
                self.register_actor_route(act_res, f"/scan/{scanname}")
567 def before_scan_starts(self, actor, save_image=True):
568 """Saving directory is created"""
569 if actor.get("datacollectionid"):
570 self._save_dc_params(actor)
571 if save_image:
572 self._save_dc_image(actor)
    def _save_dc_image(self, actor):
        """Save an image for a datacollection

        Includes origin and scalebar, and subsample location.
        Should ideally be called once the subsample has been moved to the
        origin location
        """
        try:
            details = self.save_image(
                sessionid=actor["sessionid"],
                sampleid=actor["sampleid"],
                subsampleid=actor["subsampleid"],
                savemeta=False,
                annotate=True,
                file_prefix="snapshot1_",
            )

            extra = {}
            if details.get("subsample"):
                subsample = details["subsample"]
                # NOTE(review): scale looks like nm/pixel converted to
                # pixels-per-micron (1e-9 / 1e-6) -- confirm units.
                extra["pixelspermicronx"] = details["scale"]["x"] * 1e-9 / 1e-6
                extra["pixelspermicrony"] = details["scale"]["y"] * 1e-9 / 1e-6
                extra["snapshot_offsetxpixel"] = subsample["x"]
                extra["snapshot_offsetypixel"] = subsample["y"]

            self.update_datacollection(
                actor, xtalsnapshotfullpath1=details["path"], **extra
            )
        except Exception:
            # Best effort: a failed snapshot should not abort the scan.
            logger.exception("Could not save image")
            log.get("user").exception(
                "Could not save data collection image", type="actor"
            )
    def save_sample_image(self, **kwargs):
        """Saves a sample image

        Sets up file saving, and saves an image from the source image

        Kwargs:
            sampleid (int): Sample id
        Returns:
            path (str): Path to the new image
        """
        # Configure the saving template first so save_image writes to the
        # correct location; the template is configurable via
        # `sample_image_saving`.
        self._saving.set_filename(
            set_metadata=False,
            extra_saving_args=self._config.get(
                "sample_image_saving", {"data_filename": "{sampleid.name}_image{time}"}
            ),
            **{
                "sampleid": kwargs["sampleid"],
                "sessionid": g.blsession.get("sessionid"),
                "time": int(time.time()),
            },
        )
        self._saving.create_root_path()
        return self.save_image(
            sampleid=kwargs.get("sampleid", None),
            subsampleid=kwargs.get("subsampleid", None),
            sessionid=g.blsession.get("sessionid"),
            file_prefix="sampleimage_",
        )
637 def upload_image(self, **kwargs):
638 """Send an image to an actor
640 Kwargs:
641 sampleid (int): The associated sampleid
642 image (str): The base64 encoded image
643 """
644 sample = self._metadata.get_samples(sampleid=kwargs["sampleid"])
645 self.actor(
646 "upload_canvas",
647 error=self._upload_failed,
648 spawn=True,
649 actargs={"image": kwargs["image"], "sample": sample},
650 )
652 return True
654 def _upload_failed(self, actid, exception, actor):
655 logger.error(
656 f"Could not upload image for {actor['sampleid']} exception was {exception}"
657 )
658 log.get("user").exception(
659 f"Could not upload image for {actor['sampleid']} exception was {exception}",
660 type="queue",
661 )
    def actor_started(self, actid, actor):
        """Callback when an actor starts

        For scan actors this will generate a datacollection
        """
        if actor.name in self._scan_actors:
            self.start_datacollection(actor)

        if actor.name in ["mosaic", "reference"]:
            # Mosaics and reference imports are tracked as sample actions
            # rather than datacollections.
            args = {
                "sessionid": actor["sessionid"],
                "sampleid": actor["sampleid"],
                "starttime": datetime.now(),
                "actiontype": actor.name,
            }
            sampleaction = self._metadata.add_sampleaction(**args, no_context=True)
            actor.update(sampleactionid=sampleaction["sampleactionid"])

            # Prepare the output directory for the actor's files.
            self._saving.set_filename(
                extra_saving_args=actor.saving_args, **actor.all_data
            )
            self._saving.create_root_path(wait_exists=True)
            actor.update(base_path=self._saving.dirname)

        logger.info(f"Actor '{actor.name}' with id '{actid}' started")
    def actor_success(self, actid, response, actor):
        """Callback when an actor finishes successfully

        For scan actors this update the datacollection with the endtime and
        'success' status

        For ROI scans it will launch map generation
        """
        if actor.name in self._scan_actors:
            self.update_datacollection(
                actor, endtime=datetime.now(), runstatus="Successful", emit_end=True
            )
            self._save_dc_log(actor)

            # Only scan types configured under `createmaps` trigger
            # automatic map generation.
            if actor.name in self._create_maps:
                self.generate_maps(actor["subsampleid"], actor["datacollectionid"])

        if actor.name in ["mosaic", "reference"]:
            snapshot = {}
            if actor.get("full"):
                snapshot["xtalsnapshotafter"] = self.save_full_mosaic(actor["full"])

            self._metadata.update_sampleaction(
                sampleactionid=actor["sampleactionid"],
                no_context=True,
                endtimestamp=datetime.now(),
                status="success",
                **snapshot,
            )

        logger.info(f"Actor '{actor.name}' with id '{actid}' finished")
721 def save_full_mosaic(self, image):
722 """Saves the full mosaic image
724 Also creates a thumbnail
726 Args:
727 image (PIL.Image): The image to save
729 Returns
730 path (str): Path to the newly saved image
731 """
732 directory = self._saving.dirname
733 if self._config.get("image_subdirectory"):
734 directory = os.path.join(directory, self._config.get("image_subdirectory"))
735 if not os.path.exists(directory):
736 os.makedirs(directory)
738 filename = os.extsep.join([f"mosaic_full_{time.time()}", "png"])
739 path = os.path.join(directory, filename)
740 image.save(path)
741 self._generate_thumb(path)
742 return path
744 def actor_error(self, actid, exception, actor):
745 """Callback when an actor fails
747 For scan actors this will update the datacollection with the end time and
748 'failed' status
749 """
750 status = "Aborted" if isinstance(exception, ComponentActorKilled) else "Failed"
751 if actor.name in self._scan_actors:
752 self.update_datacollection(actor, endtime=datetime.now(), runstatus=status)
753 self._save_dc_log(actor)
754 if status == "Failed":
755 self._save_dc_exception(actor, exception)
756 if actor.name in ["mosaic", "reference"]:
757 self._metadata.update_sampleaction(
758 sampleactionid=actor["sampleactionid"],
759 no_context=True,
760 endtimestamp=datetime.now(),
761 status="error",
762 message=str(exception),
763 )
764 if status == "Failed":
765 logger.error(f"Actor '{actor.name}' with id '{actid}' failed")
766 else:
767 logger.info(f"Actor '{actor.name}' with id '{actid}' aborted")
769 def actor_remove(self, actid, actor):
770 """Callback when an actor is removed from the queue
772 For scan actors this will remove the item from the database queue
773 """
774 if actor.name in self._scan_actors:
775 self._metadata.unqueue_subsample(
776 actor["subsampleid"],
777 containerqueuesampleid=actor["containerqueuesampleid"],
778 no_context=True,
779 )
781 def check_running_actors(self):
782 """Periodicically check for any running actors
784 This is used to trigger automated downstream procesing, i.e.
785 map generation so that long scans can be followed
786 """
787 logger.debug("Starting periodic actor checker")
788 while self._check_running_actors:
789 if self._generate_during_scan:
790 running_copy = self._running_actors.copy()
791 for actid, actall in running_copy.items():
792 actor = actall[0]
793 if actor.name in self._create_maps:
794 if (
795 actor.get("subsampleid")
796 and actor.get("datacollectionid")
797 and actor.get("datacollectionnumber")
798 ):
799 logger.debug(
800 f"Re/generating maps for {actor.name} dcid:{actor['datacollectionid']}"
801 )
802 self.generate_maps(
803 actor["subsampleid"],
804 actor["datacollectionid"],
805 auto=True,
806 )
807 try:
808 time.sleep(self._config["regenerate_interval"])
809 except KeyError:
810 time.sleep(60)
    def generate_maps(self, subsampleid, datacollectionid=None, auto=False):
        """Launch a series of actors to generate maps for each of the MCA ROIs"""
        # Suppresses per-actor progress events in `_update_map_actor_status`
        # while actors are still being spawned.
        self._in_generate = True

        dcs = self._metadata.get_datacollections(
            datacollectionid, subsampleid=subsampleid, no_context=True, ungroup=True
        )
        scans: ScanSource = self.get_component("scans")

        # With an explicit id a single datacollection is returned; otherwise
        # a paginated {"rows": [...]} result.
        if datacollectionid:
            dcs = [dcs]
        else:
            dcs = dcs["rows"]

        existing = self._metadata.get_xrf_maps(
            subsampleid=subsampleid, no_context=True
        )["rows"]

        if not auto:
            self.emit(
                "message",
                {"type": "generate_maps", "status": "started"},
            )
            log.get("user").info(
                "Starting map generation",
                type="actor",
            )

        count = 0
        for dc in dcs:
            # Skip datacollections that already have a map actor running.
            running = False
            for actid in self._map_actors:
                actall = self._running_actors.get(actid)
                if not actall:
                    continue

                actor = actall[0]

                if (
                    actor["subsampleid"] == subsampleid
                    and actor["datacollectionid"] == dc["datacollectionid"]
                ):
                    running = True
                    break

            if running:
                logger.info(
                    f"Generate map actor already running for subsample {subsampleid} datacollection {dc['datacollectionid']}"
                )
                continue

            rois = self._metadata.get_xrf_map_rois(
                sampleid=dc["sampleid"], no_context=True
            )["rows"]
            scan = scans.get_scans(scanid=dc["datacollectionnumber"])
            if scan.get("group"):
                # Grouped scan: gather spectra/scalars from each child scan.
                spectra = []
                scalars = []
                for child in scan["children"]:
                    spectra.append(
                        scans.get_scan_spectra(child["scanid"], allpoints=True)
                    )
                    scalars.append(
                        scans.get_scan_data(
                            child["scanid"], per_page=1e10, all_scalars=True
                        )
                    )

            else:
                spectra = scans.get_scan_spectra(
                    dc["datacollectionnumber"], allpoints=True
                )
                scalars = scans.get_scan_data(
                    dc["datacollectionnumber"], per_page=1e10, all_scalars=True
                )

            if spectra and scalars:
                self._map_actors.append(
                    self.actor(
                        "createmap",
                        spawn=True,
                        success=self._append_map,
                        error=self._map_failed,
                        actargs={
                            "group": scan.get("group"),
                            "datacollection": dc,
                            "datacollectionid": dc["datacollectionid"],
                            "datacollectionnumber": dc["datacollectionnumber"],
                            "subsampleid": subsampleid,
                            "rois": rois,
                            "spectra": spectra,
                            "scalars": scalars,
                        },
                    )
                )
                count += 1
            else:
                logger.warning(
                    f"Generate Map: Cant get spectra for scan {dc['datacollectionnumber']} (datacollectionid: {dc['datacollectionid']})"
                )
                log.get("user").warning(
                    f"Cant get spectra for scan {dc['datacollectionnumber']} (datacollectionid: {dc['datacollectionid']})",
                    type="queue",
                )

            # Update scalar maps
            if scalars:
                # Create any configured scalar maps that do not yet exist
                # for this datacollection (for-else: no break means no
                # existing map matched).
                for scalar_name in self._scalar_maps:
                    for existing_map in existing:
                        if (
                            existing_map["scalar"] == scalar_name
                            and existing_map["datacollectionid"]
                            == dc["datacollectionid"]
                        ):
                            break
                    else:
                        self.generate_additional_map(
                            datacollectionid=dc["datacollectionid"],
                            scalars=[scalar_name],
                            no_context=True,
                        )

                # Refresh the data of scalar maps that already exist.
                scalars_to_update = {}
                for m in existing:
                    if m["scalar"] and m["datacollectionid"] == dc["datacollectionid"]:
                        if scan.get("group"):
                            # Grouped scans are refreshed via the
                            # `createadditional` actor below.
                            scalars_to_update[m["scalar"]] = m["mapid"]
                        else:
                            new_data = self._get_additional_map(
                                m["scalar"], scalars=scalars
                            )
                            if new_data:
                                # -1 marks not-yet-measured points.
                                points = len(new_data) - new_data.count(-1)
                                self._metadata.update_xrf_map(
                                    mapid=m["mapid"],
                                    data=new_data,
                                    points=points,
                                    no_context=True,
                                )

                if scalars_to_update:
                    self.actor(
                        "createadditional",
                        spawn=True,
                        success=self._update_additional_maps,
                        actargs={
                            "datacollectionid": dc["datacollectionid"],
                            "datacollection": dc,
                            "selected_scalars": scalars_to_update.keys(),
                            "scalars_to_update": scalars_to_update,
                            "scalars": scalars,
                        },
                    )

        self._in_generate = False

        if count == 0 and not auto:
            self.emit(
                "message",
                {
                    "type": "generate_maps",
                    "status": "warning",
                    "message": "No maps to generate, check the log",
                },
            )
            # NOTE(review): "No maps to generation" reads oddly -- probably
            # intended "No maps to generate".
            log.get("user").info(
                "No maps to generation",
                type="actor",
            )

        return True
984 def _map_failed(self, actid, exception, actor):
985 logger.error(
986 f"Could not generate map for scan {actor['datacollectionnumber']} (datacollectionid: {actor['datacollectionid']}), exception was {exception}"
987 )
988 log.get("user").exception(
989 f"Could not generate map for scan {actor['datacollectionnumber']} (datacollectionid: {actor['datacollectionid']})",
990 type="queue",
991 )
992 self._update_map_actor_status(actid)
    def _append_map(self, actid, maps, actor):
        """Add new map to the maplist

        Will try to updating an existing map if it matched dcid and maproiid
        """
        dc = self._metadata.get_datacollections(
            actor["datacollectionid"], subsampleid=actor["subsampleid"], no_context=True
        )

        existing = self._metadata.get_xrf_maps(
            subsampleid=dc["subsampleid"], no_context=True
        )["rows"]

        if not maps:
            logger.info("No maps generated to append")
            return

        # Create / update ROI maps
        for m in maps[0]["maps"]:
            exists = False
            for ex in existing:
                if (
                    dc["datacollectionid"] == ex["datacollectionid"]
                    and m["maproiid"] == ex["maproiid"]
                ):
                    mapid = ex["mapid"]
                    # -1 marks points without data yet.
                    points = len(m["data"]) - m["data"].count(-1)
                    self._metadata.update_xrf_map(
                        mapid=ex["mapid"],
                        data=m["data"],
                        points=points,
                        no_context=True,
                    )
                    exists = True
                    break

            if not exists:
                newmap = self._metadata.add_xrf_map(
                    maproiid=m["maproiid"],
                    datacollectionid=dc["datacollectionid"],
                    data=m["data"],
                    no_context=True,
                )
                if not newmap:
                    # Persisting failed: skip the client notification too.
                    continue

                mapid = newmap["mapid"]

            # Notify clients that a map was created or updated.
            self.emit(
                "message",
                {
                    "type": "map",
                    "mapid": mapid,
                    "sampleid": dc["sampleid"],
                    "subsampleid": dc["subsampleid"],
                },
            )

        self._update_map_actor_status(actid)
1054 def _update_map_actor_status(self, actid):
1055 self._map_actors.remove(actid)
1057 if self._in_generate:
1058 return
1060 if len(self._map_actors) == 0:
1061 self.emit(
1062 "message",
1063 {"type": "generate_maps", "status": "finished"},
1064 )
1066 log.get("user").info(
1067 "Map generation complete",
1068 type="actor",
1069 )
1070 else:
1071 self.emit(
1072 "message",
1073 {
1074 "type": "generate_maps",
1075 "status": "progress",
1076 "remaining": len(self._map_actors),
1077 },
1078 )
1080 def _get_additional_map(self, scalar, scalars):
1081 """Get data for an additional map
1083 Args:
1084 scalar (str): The key for scalar data to use
1085 scalars (dict): Dict of scan data scalars
1087 Returns:
1088 data (ndarray): The map data
1089 """
1090 if scalar in scalars["data"]:
1091 data = scalars["data"][scalar]["data"]
1093 if not data:
1094 logger.warning(f"Scalar {scalar} data length is zero")
1095 return
1097 if len(data) < scalars["npoints"]:
1098 missing = scalars["npoints"] - len(data)
1099 data.extend([-1 for x in range(missing)])
1101 return data
1103 else:
1104 logger.warning(f"Cannot find scalar {scalar} in scan data")
1106 def generate_additional_map(self, **kwargs):
1107 """Generate additional maps based on scan scalars"""
1108 dc = self._metadata.get_datacollections(
1109 datacollectionid=kwargs["datacollectionid"],
1110 no_context=kwargs.get("no_context"),
1111 )
1113 if not dc:
1114 return
1116 scans: ScanSource = self.get_component("scans")
1117 scan = scans.get_scans(scanid=dc["datacollectionnumber"])
1118 if scan.get("group"):
1119 scalars = []
1120 for child in scan["children"]:
1121 scalars.append(
1122 scans.get_scan_data(
1123 child["scanid"], per_page=1e10, all_scalars=True
1124 )
1125 )
1127 self.actor(
1128 "createadditional",
1129 spawn=True,
1130 success=self._append_additional_maps,
1131 actargs={
1132 "datacollectionid": dc["datacollectionid"],
1133 "datacollection": dc,
1134 "selected_scalars": kwargs["scalars"],
1135 "scalars": scalars,
1136 },
1137 )
1139 else:
1140 scalars = scans.get_scan_data(
1141 scanid=dc["datacollectionnumber"],
1142 per_page=1e10,
1143 scalars=kwargs["scalars"],
1144 )
1146 if not scalars:
1147 logger.warning(f"Scan id {dc['datacollectionnumber']} is not available")
1148 return
1150 for scalar in kwargs["scalars"]:
1151 data = self._get_additional_map(scalar=scalar, scalars=scalars)
1152 if data:
1153 roi = self._metadata.add_xrf_map_roi_scalar(scalar=scalar)
1154 self._metadata.add_xrf_map(
1155 maproiid=roi["maproiid"],
1156 datacollectionid=dc["datacollectionid"],
1157 data=data,
1158 no_context=True,
1159 )
1161 return True
1163 def _append_additional_maps(self, actid, maps, actor):
1164 """Add new additional map to the maplist (from a group)"""
1165 for map in maps:
1166 roi = self._metadata.add_xrf_map_roi_scalar(scalar=map["scalar"])
1167 self._metadata.add_xrf_map(
1168 maproiid=roi["maproiid"],
1169 datacollectionid=actor["datacollectionid"],
1170 data=map["data"],
1171 no_context=True,
1172 )
1174 def _update_additional_maps(self, actid, maps, actor):
1175 """Update an additional map (from a group)"""
1176 for scalar, mapid in actor["scalars_to_update"].items():
1177 for map_ in maps:
1178 if map_["scalar"] == scalar:
1179 self._metadata.update_xrf_map(
1180 mapid=mapid,
1181 data=map_["data"],
1182 points=len(map_["data"]),
1183 no_context=True,
1184 )
1186 def get_sources(self):
1187 """Return list of image sources"""
1188 sources = [src.info() for src in self._sources]
1189 return {"total": len(sources), "rows": sources}
1191 def move(self, args):
1192 """Move the source image"""
1193 absol = self.get_absolute_fp(args)
1194 self.actor("move", spawn=True, actargs={"absol": absol})
1195 return True
1197 def _get_matched_positions(self, sampleactionid: int):
1198 positions = self._metadata.get_sampleaction_positions(
1199 sampleactionid=sampleactionid
1200 )
1202 refs = {}
1203 reals = {}
1204 for position in positions["rows"]:
1205 if position["type"] == "reference":
1206 refs[position["id"]] = position
1207 if position["type"] == "real":
1208 reals[position["id"]] = position
1210 ref_keys = set(refs.keys())
1211 real_keys = set(reals.keys())
1212 if ref_keys != real_keys:
1213 raise RuntimeError(
1214 f"Real and Reference positions do not match: missing refs: {list(real_keys - ref_keys)}, missing reals: {list(ref_keys - real_keys)}"
1215 )
1217 # Stolen from:
1218 # https://gitlab.esrf.fr/id16/LineControl/-/blob/master/src/linecontrol/widget/cool/FluoSampleRegistration.py#L263
1219 nbref = len(ref_keys)
1220 pfrom = numpy.ones((nbref, 3), dtype=float)
1221 pto = numpy.ones((nbref, 3), dtype=float)
1222 for i, position_id in enumerate(refs.keys()):
1223 pfrom[i, 0:2] = (refs[position_id]["posx"], refs[position_id]["posy"])
1224 pto[i, 0:2] = (
1225 reals[position_id]["posx"],
1226 reals[position_id]["posy"],
1227 )
1229 return pfrom, pto
1231 def select_reference_matrix(self, sampleactionid):
1232 """Select a sampleaction (reference image) to calculate the transfomation matrix from"""
1233 pfrom, pto = self._get_matched_positions(sampleactionid)
1234 self._reference_matrix = calculate_transform_matrix(pfrom, pto)
1236 try:
1237 self._reference_inverse_matrix = numpy.linalg.inv(self._reference_matrix)
1238 except Exception:
1239 raise RuntimeError("Could not calculate inverse matrix")
1241 with numpy.printoptions(precision=3, suppress=True):
1242 log.get("user").info(
1243 f"Transformation matrix calculated:\n{self._reference_matrix}",
1244 type="actor",
1245 )
1247 src = self.origin_defining_source
1248 src.set_reference_inverse_matrix(self._reference_inverse_matrix)
1250 def move_to_reference(self, pos, execute):
1251 """Move the source image to a reference position"""
1252 if not isinstance(self._reference_matrix, numpy.ndarray):
1253 raise RuntimeError("No reference matrix computed")
1255 transformed_pos = numpy.dot(self._reference_matrix, (pos["x"], pos["y"], 1))
1256 absol = self.get_absolute_fp({"x": transformed_pos[0], "y": transformed_pos[1]})
1257 if execute:
1258 self.actor("move", spawn=True, actargs={"absol": absol})
1260 positions = {}
1261 for motor_id, motor in absol["fixed"].items():
1262 positions[motor_id] = {
1263 "motor": motor["motor"].__repr__(),
1264 "destination": round(motor["destination"], 3),
1265 "unit": motor["unit"],
1266 }
1268 if not execute:
1269 log.get("user").info(
1270 f"""Move to:\n{positions['x']['motor']}: {positions['x']['destination']}\n{positions['y']['motor']}: {positions['y']['destination']}""",
1271 type="hardware",
1272 )
1274 return positions
1276 def export_reference_to_sampleimage(self, sampleactionid, crop=None) -> int:
1277 sampleaction = self._metadata.get_sampleactions(sampleactionid=sampleactionid)
1278 refs, reals = self._get_matched_positions(sampleactionid)
1280 export = export_reference_to_sampleimage(
1281 original_path=sampleaction["xtalsnapshotbefore"],
1282 snapshot_path=sampleaction["xtalsnapshotafter"],
1283 reference_points=refs,
1284 vlm_points=reals,
1285 )
1287 sampleimage = self._metadata.add_sampleimage(
1288 no_context=True,
1289 sampleid=sampleaction["sampleid"],
1290 offsetx=int(export.center_x),
1291 # `add_sampleimage` expects offsety to be negative
1292 offsety=int(-1 * export.center_y),
1293 scalex=float(export.scale_factor) * 1e3,
1294 # Because we inverted offsety, invert scaley to flip back
1295 scaley=float(-1 * export.scale_factor) * 1e3,
1296 file=export.image_path,
1297 )
1299 return sampleimage["sampleimageid"]
1301 def _add_exif(self, image_filename: str, metadata: Dict[str, Any]) -> None:
1302 """Write pixel size and offset to exif"""
1303 metadata_string = json.dumps(metadata, indent=2)
1305 img = Image.open(image_filename)
1306 exif = img.getexif()
1307 # TODO: This is Maker, could not get MakerNote to work
1308 # https://github.com/python-pillow/Pillow/blob/main/src/PIL/ExifTags.py#L155
1309 exif.update([(271, metadata_string)])
1310 img.save(image_filename, exif=exif)
    def save_image(
        self,
        sessionid=None,
        sampleid=None,
        subsampleid=None,
        savemeta=True,
        annotate=False,
        file_prefix="",
    ):
        """Save an image from the source device, like objects, images are marked
        relative to the origin marking position

        Only the first source flagged as `origin` is processed; the method
        returns from inside the loop after saving that source's image.

        Args:
            sessionid (int): The session id
            sampleid (int): The sample id
            subsampleid (int): Optionally a subsample id
            savemeta (bool): Whether to save this as a sample image
            annotate (bool): Whether to annotate this image (origin, scalebar, etc)
            file_prefix (str): Optional prefix prepended to the generated filename

        Returns:
            details (dict): The saved image path, annotated subsample and pixel
                scale (implicitly None when no origin source exists)
        """
        directory = self._saving.dirname
        for src in self._sources:
            if not src.origin:
                continue

            # Timestamped filename, e.g. "<prefix><epoch>.png"
            filename = os.extsep.join([f"{file_prefix}{time.time()}", "png"])

            # Optionally save into a configured subdirectory, creating it on demand
            if self._config.get("image_subdirectory"):
                directory = os.path.join(
                    directory, self._config.get("image_subdirectory")
                )
                if not os.path.exists(directory):
                    os.makedirs(directory)

            path = os.path.join(directory, filename)

            if not src.device.online():
                raise RuntimeError("Cannot save image, camera is offline")

            # Ask the camera device itself to write the image to `path`
            src.device.call("save", path)

            image_info = src.canvas.vlm_image_info
            beam = src.canvas.beam_info

            subsample = None
            if annotate:
                if subsampleid:
                    subsample = self._metadata.get_subsamples(
                        subsampleid=subsampleid, no_context=True
                    )
                # Draw origin / beam / scalebar overlays onto the saved image
                ann = AnnotateImage(path)
                details = ann.annotate(
                    image_info["center"],
                    beam["position"],
                    image_info["pixelsize"],
                    src.unit,
                    subsample,
                )
                subsample = details["subsample"]

            # Register the image (and its calibration) in the metadata store,
            # and mirror the calibration into the file's exif
            if sampleid and savemeta:
                self._metadata.add_sampleimage(
                    no_context=True,
                    sampleid=sampleid,
                    offsetx=int(image_info["center"][0]),
                    offsety=int(image_info["center"][1]),
                    scalex=float(image_info["pixelsize"][0]),
                    scaley=float(image_info["pixelsize"][1]),
                    file=path,
                    positions=src.get_additional(),
                )
                self._add_exif(
                    path,
                    {
                        "mppx": float(image_info["pixelsize"][0]),
                        "mppy": float(image_info["pixelsize"][1]),
                        "offsetx": int(image_info["center"][0]),
                        "offsety": int(image_info["center"][1]),
                    },
                )

            self._generate_thumb(path)
            return {
                "path": path,
                "subsample": subsample,
                "scale": {
                    "x": float(image_info["pixelsize"][0]),
                    "y": float(image_info["pixelsize"][1]),
                },
            }
1405 def move_to(self, subsampleid):
1406 """Move to a specific subsample
1408 Args:
1409 subsampleid (int): The subsample id
1411 Returns:
1412 success (bool): Whether the move was successful
1413 """
1414 absol = self.get_absolute(subsampleid)
1415 self.actor("move", spawn=True, actargs={"absol": absol})
1416 return True
1418 @property
1419 def origin_defining_source(self) -> Source:
1420 for src in self._sources:
1421 if src.origin is True:
1422 return src
1424 @property
1425 def beamsize(self):
1426 src = self.origin_defining_source
1427 if src:
1428 return src.beamsize
1430 def get_absolute_fp(self, pos, pos2=None):
1431 """Return absolute motor positions to bring a position to the centre of view
1433 Args:
1434 pos (dict): Dictionary containing 'x' and 'y' positions
1436 Returns:
1437 absolute (dict): Absolute positions and their associated motors
1438 """
1439 src = self.origin_defining_source
1440 if src is None:
1441 return None
1443 if pos2:
1444 absol = src.canvas.canvas_to_motor(
1445 [[pos["x"], -pos["y"]], [pos2["x"], -pos2["y"]]]
1446 )
1447 else:
1448 absol = src.canvas.canvas_to_motor([pos["x"], -pos["y"]])
1450 absol["axes"] = self.find_axes_from_variable(absol["variable"])
1451 absol["move_to"] = self.move_to_absol
1453 # print("------ get_absolute_fp ------")
1454 # pp.pprint(pos)
1455 # pp.pprint(absol)
1457 return absol
1459 def get_absolute(self, subsampleid):
1460 """Return absolute motor positions to bring a subsample id to the origin marking
1462 Args:
1463 subsampleid (int): The subsample to get the position of
1465 Returns:
1466 absolute (dict): A dictionary of the absolute positions for the subsample
1467 """
1468 src = self.origin_defining_source
1469 if src is None:
1470 return None
1472 obj = self._metadata.get_subsamples(subsampleid)
1473 if obj is None:
1474 return
1476 if obj["type"] == "loi" or obj["type"] == "roi":
1477 pos = [[obj["x"], -obj["y"]], [obj["x2"], -obj["y2"]]]
1478 else:
1479 pos = [obj["x"], -obj["y"]]
1480 absol = src.canvas.canvas_to_motor(pos)
1481 # src.canvas.sampleposition=....
1483 # print("------ get_absolute ------")
1484 # pp.pprint(obj)
1485 # pp.pprint(absol)
1487 if "z" in absol["fixed"]:
1488 del absol["fixed"]["z"]
1490 absol["axes"] = self.find_axes_from_variable(absol["variable"])
1491 absol["move_to_additional"] = src.move_to_additional
1492 absol["positions"] = obj["positions"]
1493 absol["move_to"] = self.move_to_absol
1495 return absol
1497 def find_axes_from_variable(self, variable):
1498 axes = {}
1499 for key, obj in variable.items():
1500 for axis in ["x", "y", "z"]:
1501 if key.startswith(axis):
1502 axes[axis] = obj
1504 return axes
1506 def move_to_absol(self, absol, sequential=False):
1507 all_objs = list(absol["fixed"].values()) + list(
1508 absol.get("variable", {}).values()
1509 )
1510 for obj in all_objs:
1511 if isinstance(obj["destination"], list):
1512 obj["motor"].move(obj["destination"][0])
1513 else:
1514 obj["motor"].move(obj["destination"])
1516 if sequential:
1517 obj["motor"].wait()
1519 for obj in all_objs:
1520 obj["motor"].wait()
1522 def _generate_thumb(self, path):
1523 size = (250, 250)
1524 thumb = Image.open(path)
1525 thumb.thumbnail(size, Image.LANCZOS)
1526 thumb.save(path.replace(".png", "t.png"))
1528 def export_subsamples(self, subsampleids):
1529 subsamples = []
1530 sampleid = None
1531 for subsampleid in subsampleids:
1532 obj = self._metadata.get_subsamples(subsampleid)
1533 sampleid = obj["sampleid"]
1534 sample = self._metadata.get_samples(sampleid)
1535 abs = self.get_absolute(subsampleid)
1537 motor_types = {}
1538 motor_positions = {}
1539 for move_type in ("fixed", "variable"):
1540 motor_positions[move_type] = {}
1542 for motor_type, details in abs[move_type].items():
1543 motor_types[motor_type] = details["motor"].name()
1544 motor_positions[move_type][details["motor"].name()] = {
1545 "destination": details["destination"],
1546 "unit": details["unit"],
1547 "unit_exponent": details["unit_exponent"],
1548 }
1550 subsamples.append(
1551 {
1552 "subsampleid": subsampleid,
1553 "type": obj["type"],
1554 "comments": obj["comments"],
1555 "extrametadata": {
1556 **(sample["extrametadata"] if sample["extrametadata"] else {}),
1557 "subsample": obj["extrametadata"],
1558 },
1559 "additional": abs["positions"],
1560 "motors": motor_positions,
1561 }
1562 )
1564 actor, greenlet = self.actor(
1565 "export",
1566 start=self._set_export_path,
1567 spawn=True,
1568 return_actor=True,
1569 actargs={
1570 "motor_types": motor_types,
1571 "subsamples": subsamples,
1572 "sessionid": g.blsession.get("sessionid"),
1573 "sampleid": sampleid,
1574 "time": int(time.time()),
1575 },
1576 )
1578 greenlet.join()
1579 if actor._failed:
1580 raise actor._exception
1582 return actor["dirname"]
1584 def _set_export_path(self, actid, actor):
1585 self._saving.set_filename(
1586 set_metadata=False,
1587 extra_saving_args=actor.saving_args,
1588 **actor.all_data,
1589 )
1590 self._saving.create_root_path(wait_exists=True)
1591 actor.update(dirname=self._saving.dirname)