@@ -920,6 +920,7 @@ def define_dataset_manual(
920920 image_file_pattern ,
921921 dataset_organisation ,
922922 definition_opts = None ,
923+ list_files = None ,
923924):
924925 """Run "Define Multi-View Dataset" using the "Manual Loader" option.
925926
@@ -938,20 +939,23 @@ def define_dataset_manual(
938939 Looks like "timepoints_=%s-%s channels_=0-%s tiles_=%s-%s"
939940 definition_opts : dict
940941 Dictionary containing the details about the file repartitions.
942+ list_files : list of str, optional
943+ An optional list of file names to pass directly to the manual loader in
944+ "show_list" mode. When provided, the function will include the filenames
945+ in the options string instead of relying on a file pattern; items should
946+ be either full paths or relative to the selected `source_directory`.
941947 """
942-
943- xml_filename = project_filename + ".xml"
948+ # xml_filename = project_filename + ".xml"
944949
945950 if definition_opts is None :
946951 definition_opts = DefinitionOptions ()
947952
948- temp = os .path .join (source_directory , project_filename + "_temp" )
949- os .path .join (temp , project_filename )
953+ show_list_options = "" if not list_files else "show_list " + " " .join (list_files )
950954
951955 options = (
952956 "define_dataset=[Manual Loader (Bioformats based)] "
953957 + "project_filename=["
954- + xml_filename
958+ + project_filename
955959 + "] "
956960 + "_____"
957961 + definition_opts .fmt_acitt_options ()
@@ -961,11 +965,12 @@ def define_dataset_manual(
961965 + " "
962966 + "image_file_pattern="
963967 + image_file_pattern
968+ + " "
964969 + dataset_organisation
965970 + " "
966971 + "calibration_type=[Same voxel-size for all views] "
967972 + "calibration_definition=[Load voxel-size(s) from file(s)] "
968- # + "imglib2_data_container=[ArrayImg (faster)]"
973+ + show_list_options
969974 )
970975
971976 log .debug ("Manual dataset definition options: <%s>" , options )
@@ -1050,7 +1055,7 @@ def resave_as_h5(
10501055 )
10511056
10521057 log .debug ("Resave as HDF5 options: <%s>" , options )
1053- IJ .run ("As HDF5" , str (options ))
1058+ IJ .run ("Resave as HDF5 (local) " , str (options ))
10541059
10551060
10561061def flip_axes (source_xml_file , x = False , y = True , z = False ):
@@ -1614,148 +1619,127 @@ def fuse_dataset(
def fuse_dataset_bdvp(
    project_path,
    command,
    result_path=None,
    fusion_method="SMOOTH AVERAGE",
    range_channels="",
    range_slices="",
    range_frames="",
    n_resolution_levels=5,
    use_lzw_compression=True,
    split_slices=False,
    split_channels=False,
    split_frames=False,
    override_z_ratio=False,
    z_ratio=1.0,
    use_interpolation=True,
):
    """Export a project using the BigDataViewer playground (`bdvp`) exporter.

    Use the BigDataViewer playground / BIOP Kheops exporter to fuse a
    BigDataViewer project and save it as pyramidal OME-TIFF.

    Parameters
    ----------
    project_path : str
        Full path to the BigDataViewer XML project file.
    command : CommandService
        The Scijava CommandService instance to execute the export command.
    result_path : str, optional
        Path where to store the exported files. If `None`, files will be
        saved in the same directory as the input project.
    fusion_method : str, optional
        Fusion method to use for exporting (default `SMOOTH AVERAGE`).
    range_channels : str, optional
        Channels to include in the export. Default is all channels.
    range_slices : str, optional
        Slices to include in the export. Default is all slices.
    range_frames : str, optional
        Frames to include in the export. Default is all frames.
    n_resolution_levels : int, optional
        Number of pyramid resolution levels to use for the export. Default is 5.
    use_lzw_compression : bool, optional
        Compress the output file using LZW. Default is True.
    split_slices : bool, optional
        Split output into separate files for each slice. Default is False.
    split_channels : bool, optional
        Split output into separate files for each channel. Default is False.
    split_frames : bool, optional
        Split output into separate files for each frame. Default is False.
    override_z_ratio : bool, optional
        Override the default `z_ratio` value. Default is False.
    z_ratio : float, optional
        The z ratio to use for the export. Default is 1.0.
    use_interpolation : bool, optional
        Interpolate during fusion (takes ~4x longer). Default is True.

    Notes
    -----
    Requires the `PTBIOP` update site to be enabled in Fiji/ImageJ.

    Examples
    --------
    Example 1 - simple export using a CommandService instance available as
    `command`, using the default options and placing the output next to the
    input xml:

    >>> #@ CommandService command
    >>> xml_input = "/path/to/project.xml"
    >>> fuse_dataset_bdvp(xml_input, command)

    Example 2 - explicit options using a custom output path, specific channels,
    disabling interpolation and overriding the z-ratio:

    >>> #@ CommandService command
    >>> xml_input = "/path/to/project.xml"
    >>> out_dir = "/path/to/output_dir"
    >>> fuse_dataset_bdvp(
    ...     xml_input,
    ...     command,
    ...     result_path=out_dir,
    ...     fusion_method="SMOOTH AVERAGE",
    ...     range_channels="0-1",
    ...     n_resolution_levels=4,
    ...     use_lzw_compression=False,
    ...     split_channels=True,
    ...     override_z_ratio=True,
    ...     z_ratio=2.0,
    ...     use_interpolation=False,
    ... )
    """
    file_info = pathtools.parse_path(project_path)

    # Default to placing the export next to the input XML project.
    if not result_path:
        result_path = file_info["path"]

    # Parameters are passed as alternating name / value pairs, as required by
    # the SciJava CommandService.run(Class, boolean, Object...) signature.
    # The boolean flag seems to indicate whether to run the command headless
    # or not. The trailing .get() blocks until the command future completes.
    command.run(
        FuseBigStitcherDatasetIntoOMETiffCommand,
        True,
        "xml_bigstitcher_file",
        project_path,
        "output_path_directory",
        result_path,
        "range_channels",
        range_channels,
        "range_slices",
        range_slices,
        "range_frames",
        range_frames,
        "n_resolution_levels",
        n_resolution_levels,
        "fusion_method",
        fusion_method,
        "use_lzw_compression",
        use_lzw_compression,
        "split_slices",
        split_slices,
        "split_channels",
        split_channels,
        "split_frames",
        split_frames,
        "override_z_ratio",
        override_z_ratio,
        "z_ratio",
        z_ratio,
        "use_interpolation",
        use_interpolation,
    ).get()
0 commit comments