core package#

Submodules#

core.dask_cluster module#

Classes and functions to manage a Dask cluster for parallel runs

class core.dask_cluster.DaskCluster(requested_nb_nodes, maximum_load=0.6, memory_per_worker=12000)#

Bases: object

Used to manage parallel runs with the Dask package

create_distributed_client()#

Instantiate the workers

initialize_cluster()#

Defines the number of threads allocated

core.dask_cluster.try_get_client()#

Check whether a Dask client is alive

Returns

Client instance or None

Return type

dask.distributed.Client or None
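
A minimal usage sketch, assuming only the signatures listed above; how the resulting client is retrieved from the DaskCluster instance afterwards is not documented here:

```python
# Hedged sketch: reuse an existing Dask client if one is alive, else build a cluster.
from core.dask_cluster import DaskCluster, try_get_client

client = try_get_client()  # existing dask.distributed.Client, or None
if client is None:
    cluster = DaskCluster(requested_nb_nodes=4, maximum_load=0.8, memory_per_worker=12000)
    cluster.initialize_cluster()         # define how many threads/workers to allocate
    cluster.create_distributed_client()  # instantiate the workers
```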

core.data_file module#

Data files module

Manage file operations, depending on DataManager.

class core.data_file.BlockAlignmentFile(relative_shifts, rms_image, contour)#

Bases: core.data_file.DataFile

delete_data()#
save(folder_path, basename)#
class core.data_file.BothImgRbgFile(image1_uncorrected, image2_corrected_raw)#

Bases: core.data_file.DataFile

delete_data()#
save(folder_path, basename)#

Overlays two images as R and B and saves them to the output file

class core.data_file.DataFile(data=None)#

Bases: object

delete_data()#
save(folder_path: str, basename: str)#
class core.data_file.EcsvFile(data)#

Bases: core.data_file.DataFile

save(folder_path: str, basename: str)#
class core.data_file.EqualizationHistogramsFile(i_histogram, lower_threshold)#

Bases: core.data_file.DataFile

delete_data()#
save(folder_path, basename)#

Overlays two images as R and B and saves them to the output file

class core.data_file.FocalPlaneMatrixFile(data, title)#

Bases: core.data_file.DataFile

save(folder_path: str, basename: str)#
class core.data_file.JsonFile(data)#

Bases: core.data_file.DataFile

save(folder_path: str, basename: str)#
class core.data_file.NpyFile(npy_data, status: str, cycle='', path='', basename='', label='')#

Bases: core.data_file.DataFile

get_root()#
load()#
save(folder_path: str, basename: str)#
class core.data_file.Png2DFile(data)#

Bases: core.data_file.DataFile

save(folder_path: str, basename: str)#
class core.data_file.RefDiffFile(preprocessed_ref, shifted_img, preprocessed_img)#

Bases: core.data_file.DataFile

delete_data()#
save(folder_path, basename)#

Overlays two images as R and B and saves them to the output file

class core.data_file.TifFile(path, basename, ext, label, cycle)#

Bases: object

get_root()#
load()#
core.data_file.save_json(data, file_name)#

Save a Python dict as a JSON file

Parameters
  • data (dict) – Data to save

  • file_name (str) – Output JSON file name
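
A brief, hedged sketch of the two saving paths in this module; the dict keys, file names and folder are illustrative only:

```python
import numpy as np

from core.data_file import NpyFile, save_json

# Save a plain dict as JSON.
save_json({"pixelSizeXY": 0.1, "zBinning": 2}, "parameters_loaded.json")

# Save a numpy array through the DataFile interface; the exact file written by
# save() (e.g. "./output/example.npy") is an assumption based on the argument names,
# and "./output" is assumed to already exist.
block = NpyFile(np.zeros((10, 10)), status="raw", label="barcode")
block.save("./output", "example")
```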

core.data_manager module#

Data manager module

Manage writing, reading and checking data.

class core.data_manager.DataManager(data_path: str, md_file: str = '', param_file: Optional[str] = None)#

Bases: object

Single component responsible for exchanging data with the rest of the system

add_to_processable_labels(label)#
check_roi_uniqueness(roi_name: str)#
create_dict_structure()#
create_out_structure(folder_name: str)#

Create output folder structure for one Feature.

Parameters

folder_name (str) – Relative path name of Feature output folder name

decode_file_parts(file_name)#

Decodes variables from an input file name. Typically, the regular expression takes the form:

“scan_(?P<runNumber>[0-9]+)_(?P<cycle>[\w|-]+)_(?P<roi>[0-9]+)_ROI_converted_decon_(?P<channel>[\w|-]+)”

Thus, running decode_file_parts(file_name) returns either an empty dict, if the regular expression is not present in your parameters.json file, or, if all worked out fine, a dict such as:

file_parts[‘runNumber’]: run number
file_parts[‘cycle’]: cycle string
file_parts[‘roi’]: ROI number
file_parts[‘channel’]: channel string

Parameters

file_name (string) – filename to decode

Return type

Dict with file_parts.
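
For illustration, the same decoding can be reproduced with the standard re module and the regular expression quoted above (the filename is made up):

```python
import re

# Regular expression as documented for decode_file_parts.
regexp = r"scan_(?P<runNumber>[0-9]+)_(?P<cycle>[\w|-]+)_(?P<roi>[0-9]+)_ROI_converted_decon_(?P<channel>[\w|-]+)"

match = re.search(regexp, "scan_001_RT27_005_ROI_converted_decon_ch01.tif")
if match:
    file_parts = match.groupdict()
    # {'runNumber': '001', 'cycle': 'RT27', 'roi': '005', 'channel': 'ch01'}
```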

dispatch_files()#

Get all input files and sort by extension type

find_label(filename, channel)#

Decode a filename to find its label (fiducial, DAPI, barcode, RNA, mask)

Parameters

filename (str) – An input data filename

Returns

A label (a type of data)

Return type

str

Raises

ValueError – Label NOT FOUND

find_param_file(param_file: Optional[str] = None)#

Find the user parameters file (e.g. parameters.json) among the extracted input files.

Returns

Parameters file path

Return type

str

Raises

ValueError – Parameters file NOT FOUND

get_inputs(tif_labels: list[str], npy_labels: list[str])#
get_processable_labels()#
load_reference(required_ref)#
load_user_param()#

Load the user parameter JSON file as a Python dict

Returns

Python dict

Return type

dict

Raises

ValueError – file not found

load_user_param_with_structure()#
save_data(results: list[core.data_file.DataFile], feature_folder: str, basename: str)#
save_parameters_loaded()#
set_label_decoder()#
set_labelled_params(labelled_sections)#
set_up()#
core.data_manager.create_folder(folder_path: str)#

Create a folder with makedirs from the os module, which creates directories recursively.

Parameters

folder_path (str) – Path name of folder

core.data_manager.extract_files(root: str)#

Recursively extract file information for all files inside a given directory.

Note:
  • filepath is the directory path with filename and extension

  • filename is the name without extension

Parameters

root (str) – The name of root directory

Returns

List of file information tuples: (filepath, filename, extension)

Return type

List[Tuple[str, str, str]]
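
A minimal sketch of the traversal described above, using only the standard library; the real implementation may split extensions differently (e.g. for names containing several dots):

```python
import os

def extract_files_sketch(root: str):
    """Recursively collect (filepath, filename, extension) tuples under `root`."""
    files = []
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            filename, ext = os.path.splitext(name)
            files.append((os.path.join(dirpath, name), filename, ext.lstrip(".")))
    return files
```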

core.data_manager.remove_extension(filename: str)#

core.function_caller module#

Module for high level function calling

class core.function_caller.Pipeline(data_m, cmd_list, is_parallel, logger)#

Bases: object

Class for high level function calling

align_images_3d(current_param, label, data_path, registration_params: core.parameters.RegistrationParams, dict_shifts_path, roi_name, z_binning)#
apply_registrations(current_param, label, data_path, registration_params, roi_name, projection_params)#
init_features()#
interpret_cmd_list(cmd_list)#
lauch_dask_scheduler(threads_requested=25, maximum_load=0.8)#
manage_parallel_option(feature, *args, **kwargs)#
run()#
segment_masks(current_param, label, data_path, params: core.parameters.SegmentationParams, align_folder)#
segment_masks_3d(current_param, label, roi_name: str, data_path, segmentation_params, dict_shifts_path, acq_params: core.parameters.AcquisitionParams, reg_params: core.parameters.RegistrationParams)#
segment_sources_3d(current_param, label, roi_name: str, data_path, segmentation_params, dict_shifts_path, acq_params: core.parameters.AcquisitionParams, proj_params: core.parameters.ProjectionParams, reg_params: core.parameters.RegistrationParams)#
set_params_from_cmds()#
core.function_caller.build_matrix(current_param, label, data_path, matrix_params, acq_params: core.parameters.AcquisitionParams)#

Build matrices

Parameters
  • current_param (Parameters) – _description_

  • label (str) – Only ‘barcode’ is accepted

core.function_caller.build_traces(current_param, label, data_path, segmentation_params, matrix_params: core.parameters.MatrixParams, acq_params: core.parameters.AcquisitionParams)#

Build traces

Parameters
  • current_param (Parameters) – _description_

  • label (str) – Only ‘barcode’ is accepted

core.function_caller.filter_localizations(current_param, label, data_path, segmentation_params, matrix_params: core.parameters.MatrixParams)#

Filters barcode localization table

Parameters
  • current_param (Parameters) – _description_

  • label (str) – Only ‘barcode’ is accepted

core.function_caller.get_a_dict_value(d: dict)#
core.function_caller.register_localizations(current_param, label, data_path, local_shifts_path, segmentation_params, reg_params: core.parameters.RegistrationParams, matrix_params: core.parameters.MatrixParams)#

Registers barcode localization table

Parameters
  • current_param (Parameters) – _description_

  • label (str) – Only ‘barcode’ is accepted

core.function_caller.remove_none_from_list(list_with_none: list)#
core.function_caller.run_pattern(feat, f2p, reference_file, m_data_m)#

Generic pattern for both run modes, sequential and parallel. (It needs to be a function rather than a method to support parallel runs.) A hedged usage sketch follows the parameter list below.

Parameters
  • feat (Feature) – A sub-class of Feature

  • f2p (TifFile) – A file object with a load method. TODO: create a mother class File for TifFile to be generic with other types of data files

  • m_data_m – Data manager used to save outputs
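
A heavily hedged sketch of that pattern; Feature.run, feat.out_folder and f2p.basename are assumptions, only load() and DataManager.save_data() appear in the signatures documented on this page:

```python
def run_pattern_sketch(feat, f2p, reference_file, m_data_m):
    data = f2p.load()                         # TifFile-like object with a load() method
    results = feat.run(data, reference_file)  # hypothetical Feature entry point
    # save_data(results, feature_folder, basename) is documented in DataManager;
    # feat.out_folder and f2p.basename are assumed attribute names.
    m_data_m.save_data(results, feat.out_folder, f2p.basename)
    return results
```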

core.parameters module#

Classes and functions to manage pyHiM parameters

class core.parameters.AcquisitionParams(DAPI_channel: str = <factory>, RNA_channel: str = <factory>, barcode_channel: str = <factory>, mask_channel: str = <factory>, fiducialBarcode_channel: str = <factory>, fiducialMask_channel: str = <factory>, fiducialDAPI_channel: str = <factory>, fileNameRegExp: str = <factory>, pixelSizeXY: float = <factory>, pixelSizeZ: float = <factory>, zBinning: int = <factory>, unknown_params: typing.Optional[dataclasses_json.utils.CatchAllVar] = <factory>)#

Bases: object

acquisition section of parameters.json parameter file.

DAPI_channel: str#
RNA_channel: str#
barcode_channel: str#
dataclass_json_config = {'undefined': Undefined.INCLUDE}#
fiducialBarcode_channel: str#
fiducialDAPI_channel: str#
fiducialMask_channel: str#
fileNameRegExp: str#
classmethod from_dict(kvs: Optional[Union[dict, list, str, int, float, bool]], *, infer_missing=False) dataclasses_json.api.A#
classmethod from_json(s: Union[str, bytes, bytearray], *, parse_float=None, parse_int=None, parse_constant=None, infer_missing=False, **kw) dataclasses_json.api.A#
mask_channel: str#
pixelSizeXY: float#
pixelSizeZ: float#
classmethod schema(*, infer_missing: bool = False, only=None, exclude=(), many: bool = False, context=None, load_only=(), dump_only=(), partial: bool = False, unknown=None) dataclasses_json.mm.SchemaF[dataclasses_json.api.A]#
to_dict(encode_json=False) Dict[str, Optional[Union[dict, list, str, int, float, bool]]]#
to_json(*, skipkeys: bool = False, ensure_ascii: bool = True, check_circular: bool = True, allow_nan: bool = True, indent: Optional[Union[int, str]] = None, separators: Optional[Tuple[str, str]] = None, default: Optional[Callable] = None, sort_keys: bool = False, **kw) str#
unknown_params: Optional[dataclasses_json.utils.CatchAllVar]#
zBinning: int#
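
A usage sketch, assuming parameters.json holds an "acquisition" section as stated above; unknown keys are collected in unknown_params thanks to the Undefined.INCLUDE configuration:

```python
import json

from core.parameters import AcquisitionParams

with open("parameters.json", encoding="utf-8") as f:
    raw = json.load(f)

# Build the dataclass from the "acquisition" section via dataclasses_json.
acq = AcquisitionParams.from_dict(raw["acquisition"])
print(acq.pixelSizeXY, acq.zBinning)
```
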
class core.parameters.MatrixParams(folder: str = <factory>, tracing_method: typing.List[str] = <factory>, mask_expansion: int = <factory>, masks2process: typing.Dict[str, str] = <factory>, flux_min: int = <factory>, flux_min_3D: float = <factory>, KDtree_distance_threshold_mum: int = <factory>, toleranceDrift: typing.Union[int, typing.List[int]] = <factory>, remove_uncorrected_localizations: bool = <factory>, z_offset: float = <factory>, unknown_params: typing.Optional[dataclasses_json.utils.CatchAllVar] = <factory>)#

Bases: object

buildsPWDmatrix section of parameters.json parameter file.

KDtree_distance_threshold_mum: int#
dataclass_json_config = {'undefined': Undefined.INCLUDE}#
flux_min: int#
flux_min_3D: float#
folder: str#
classmethod from_dict(kvs: Optional[Union[dict, list, str, int, float, bool]], *, infer_missing=False) dataclasses_json.api.A#
classmethod from_json(s: Union[str, bytes, bytearray], *, parse_float=None, parse_int=None, parse_constant=None, infer_missing=False, **kw) dataclasses_json.api.A#
mask_expansion: int#
masks2process: Dict[str, str]#
remove_uncorrected_localizations: bool#
classmethod schema(*, infer_missing: bool = False, only=None, exclude=(), many: bool = False, context=None, load_only=(), dump_only=(), partial: bool = False, unknown=None) dataclasses_json.mm.SchemaF[dataclasses_json.api.A]#
to_dict(encode_json=False) Dict[str, Optional[Union[dict, list, str, int, float, bool]]]#
to_json(*, skipkeys: bool = False, ensure_ascii: bool = True, check_circular: bool = True, allow_nan: bool = True, indent: Optional[Union[int, str]] = None, separators: Optional[Tuple[str, str]] = None, default: Optional[Callable] = None, sort_keys: bool = False, **kw) str#
toleranceDrift: Union[int, List[int]]#
tracing_method: List[str]#
unknown_params: Optional[dataclasses_json.utils.CatchAllVar]#
z_offset: float#
class core.parameters.Parameters(raw_dict, root_folder='./', label='')#

Bases: object

Manage all pyHiM parameters. Legacy approach, used before the pyHiM restructuring.

complete_with_default(raw_dict)#
decode_file_parts(file_name)#

Decodes variables from an input file name. Typically, the regular expression takes the form:

“scan_(?P<runNumber>[0-9]+)_(?P<cycle>[\w|-]+)_(?P<roi>[0-9]+)_ROI_converted_decon_(?P<channel>[\w|-]+).tif”

Thus, running decode_file_parts(file_name) returns either an empty dict, if the regular expression is not present in your parameters.json file, or, if all worked out fine, a dict such as:

file_parts[‘runNumber’]: run number
file_parts[‘cycle’]: cycle string
file_parts[‘roi’]: ROI number
file_parts[‘channel’]: channel string

Parameters

file_name (string) – filename to decode

Return type

Dict with file_parts.

find_files_to_process(files_folder)#

Find label-specific filenames in the file list and save them in self.files_to_process.

Parameters

files_folder (list) – List of files

get_labeled_dict_value(section, param_name)#
get_labelled_params(label_name: str)#
get_sectioned_params(section_name: str)#
static get_standard_parameters()#

Return the reference set of standard parameters

set_channel(key, default)#

Set channel parameter with a default value

Parameters
  • key (str) – Name like DAPI_channel, barcode_channel, fiducialMask_channel, …

  • default (str) – Like ch00, ch01, …

Returns

Channel value like ‘ch02’

Return type

str

class core.parameters.Params(labelled_dict: dict, sections: List[str])#

Bases: object

highlight_deprecated_params(dict_to_check: dict)#

Warn the user about unused or deprecated parameters in their parameters.json

Parameters

dict_to_check (dict) – _description_

print_as_dict()#
class core.parameters.ProjectionParams(folder: str = <factory>, mode: str = <factory>, block_size: int = <factory>, display: bool = <factory>, zmin: int = <factory>, zmax: int = <factory>, zwindows: int = <factory>, window_security: int = <factory>, z_project_option: str = <factory>, unknown_params: typing.Optional[dataclasses_json.utils.CatchAllVar] = <factory>)#

Bases: object

zProject section of parameters.json parameter file.

block_size: int#
dataclass_json_config = {'letter_case': <function camelcase>, 'undefined': Undefined.INCLUDE}#
display: bool#
folder: str#
classmethod from_dict(kvs: Optional[Union[dict, list, str, int, float, bool]], *, infer_missing=False) dataclasses_json.api.A#
classmethod from_json(s: Union[str, bytes, bytearray], *, parse_float=None, parse_int=None, parse_constant=None, infer_missing=False, **kw) dataclasses_json.api.A#
mode: str#
classmethod schema(*, infer_missing: bool = False, only=None, exclude=(), many: bool = False, context=None, load_only=(), dump_only=(), partial: bool = False, unknown=None) dataclasses_json.mm.SchemaF[dataclasses_json.api.A]#
to_dict(encode_json=False) Dict[str, Optional[Union[dict, list, str, int, float, bool]]]#
to_json(*, skipkeys: bool = False, ensure_ascii: bool = True, check_circular: bool = True, allow_nan: bool = True, indent: Optional[Union[int, str]] = None, separators: Optional[Tuple[str, str]] = None, default: Optional[Callable] = None, sort_keys: bool = False, **kw) str#
unknown_params: Optional[dataclasses_json.utils.CatchAllVar]#
window_security: int#
z_project_option: str#
zmax: int#
zmin: int#
zwindows: int#
class core.parameters.RegistrationParams(register_global_folder: str = <factory>, register_local_folder: str = <factory>, outputFile: str = <factory>, referenceFiducial: str = <factory>, localAlignment: str = <factory>, alignByBlock: bool = <factory>, tolerance: float = <factory>, lower_threshold: float = <factory>, higher_threshold: float = <factory>, _3D_lower_threshold: typing.Union[float, str] = <factory>, _3D_higher_threshold: typing.Union[float, str] = <factory>, background_sigma: float = <factory>, blockSize: int = <factory>, blockSizeXY: int = <factory>, upsample_factor: int = <factory>, unknown_params: typing.Optional[dataclasses_json.utils.CatchAllVar] = <factory>)#

Bases: object

alignImages section of parameters.json parameter file.

alignByBlock: bool#
background_sigma: float#
blockSize: int#
blockSizeXY: int#
dataclass_json_config = {'undefined': Undefined.INCLUDE}#
classmethod from_dict(kvs: Optional[Union[dict, list, str, int, float, bool]], *, infer_missing=False) dataclasses_json.api.A#
classmethod from_json(s: Union[str, bytes, bytearray], *, parse_float=None, parse_int=None, parse_constant=None, infer_missing=False, **kw) dataclasses_json.api.A#
higher_threshold: float#
localAlignment: str#
lower_threshold: float#
outputFile: str#
referenceFiducial: str#
register_global_folder: str#
register_local_folder: str#
classmethod schema(*, infer_missing: bool = False, only=None, exclude=(), many: bool = False, context=None, load_only=(), dump_only=(), partial: bool = False, unknown=None) dataclasses_json.mm.SchemaF[dataclasses_json.api.A]#
to_dict(encode_json=False) Dict[str, Optional[Union[dict, list, str, int, float, bool]]]#
to_json(*, skipkeys: bool = False, ensure_ascii: bool = True, check_circular: bool = True, allow_nan: bool = True, indent: Optional[Union[int, str]] = None, separators: Optional[Tuple[str, str]] = None, default: Optional[Callable] = None, sort_keys: bool = False, **kw) str#
tolerance: float#
unknown_params: Optional[dataclasses_json.utils.CatchAllVar]#
upsample_factor: int#
class core.parameters.SegmentationParams(mask_2d_folder: str = <factory>, mask_3d_folder: str = <factory>, localize_2d_folder: str = <factory>, localize_3d_folder: str = <factory>, operation: str = <factory>, outputFile: str = <factory>, background_method: str = <factory>, stardist_basename: str = <factory>, stardist_network: str = <factory>, stardist_network3D: str = <factory>, tesselation: bool = <factory>, background_sigma: float = <factory>, threshold_over_std: float = <factory>, fwhm: float = <factory>, brightest: int = <factory>, intensity_min: int = <factory>, intensity_max: int = <factory>, area_min: int = <factory>, area_max: int = <factory>, reducePlanes: bool = <factory>, residual_max: float = <factory>, sigma_max: int = <factory>, centroidDifference_max: int = <factory>, _3Dmethod: str = <factory>, _3DGaussianfitWindow: typing.Union[int, str] = <factory>, _3dAP_window: typing.Union[int, str] = <factory>, _3dAP_flux_min: typing.Union[int, str] = <factory>, _3dAP_brightest: typing.Union[int, str] = <factory>, _3dAP_distTolerance: typing.Union[int, str] = <factory>, _3D_threshold_over_std: typing.Union[int, str] = <factory>, _3D_sigma: typing.Union[int, str] = <factory>, _3D_boxSize: typing.Union[int, str] = <factory>, _3D_area_min: typing.Union[int, str] = <factory>, _3D_area_max: typing.Union[int, str] = <factory>, _3D_nlevels: typing.Union[int, str] = <factory>, _3D_contrast: typing.Union[float, str] = <factory>, _3D_psf_z: typing.Union[int, str] = <factory>, _3D_psf_yx: typing.Union[int, str] = <factory>, _3D_lower_threshold: typing.Union[float, str] = <factory>, _3D_higher_threshold: typing.Union[float, str] = <factory>, unknown_params: typing.Optional[dataclasses_json.utils.CatchAllVar] = <factory>)#

Bases: object

segmentedObjects section of parameters.json parameter file.

area_max: int#
area_min: int#
background_method: str#
background_sigma: float#
brightest: int#
centroidDifference_max: int#
dataclass_json_config = {'undefined': Undefined.INCLUDE}#
classmethod from_dict(kvs: Optional[Union[dict, list, str, int, float, bool]], *, infer_missing=False) dataclasses_json.api.A#
classmethod from_json(s: Union[str, bytes, bytearray], *, parse_float=None, parse_int=None, parse_constant=None, infer_missing=False, **kw) dataclasses_json.api.A#
fwhm: float#
intensity_max: int#
intensity_min: int#
localize_2d_folder: str#
localize_3d_folder: str#
mask_2d_folder: str#
mask_3d_folder: str#
operation: str#
outputFile: str#
reducePlanes: bool#
residual_max: float#
classmethod schema(*, infer_missing: bool = False, only=None, exclude=(), many: bool = False, context=None, load_only=(), dump_only=(), partial: bool = False, unknown=None) dataclasses_json.mm.SchemaF[dataclasses_json.api.A]#
sigma_max: int#
stardist_basename: str#
stardist_network: str#
stardist_network3D: str#
tesselation: bool#
threshold_over_std: float#
to_dict(encode_json=False) Dict[str, Optional[Union[dict, list, str, int, float, bool]]]#
to_json(*, skipkeys: bool = False, ensure_ascii: bool = True, check_circular: bool = True, allow_nan: bool = True, indent: Optional[Union[int, str]] = None, separators: Optional[Tuple[str, str]] = None, default: Optional[Callable] = None, sort_keys: bool = False, **kw) str#
unknown_params: Optional[dataclasses_json.utils.CatchAllVar]#
core.parameters.deep_dict_update(main_dict: dict, new_dict: dict)#

Recursively update a nested dict with another. main_dict keys/values that do not exist in new_dict are kept.

Parameters
  • main_dict (dict) – Main dict with all default values

  • new_dict (dict) – Dict with new values to update

Returns

The main_dict overwritten with the new_dict values

Return type

dict
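
A minimal sketch of the behaviour described above (not the actual implementation):

```python
def deep_dict_update_sketch(main_dict: dict, new_dict: dict) -> dict:
    """Recursively merge new_dict into main_dict, keeping keys absent from new_dict."""
    for key, value in new_dict.items():
        if isinstance(value, dict) and isinstance(main_dict.get(key), dict):
            deep_dict_update_sketch(main_dict[key], value)  # descend into nested sections
        else:
            main_dict[key] = value  # overwrite or add leaf values
    return main_dict

# deep_dict_update_sketch({"a": {"x": 1, "y": 2}}, {"a": {"y": 3}})
# -> {"a": {"x": 1, "y": 3}}
```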

core.parameters.get_dictionary_value(dictionary: dict, key: str, default: str = '')#

Get dict value with a default option if key doesn’t exist.

Parameters
  • dictionary (dict) – dictionary object

  • key (str) – key for the dict

  • default (str, optional) – Default value if the key doesn’t exist, by default “”

Returns

value or default

Return type

str

core.parameters.load_alignment_dict(dict_shifts_path)#

Load a JSON file with the shifts

Parameters

dict_shifts_path (str) – Path to the shift dictionary calculated by register_global

Returns

Shift dictionaries

Return type

(dict,dict)

core.parameters.load_json(file_name)#

Load a JSON file as a Python dict

Parameters

file_name (str) – JSON file name

Returns

Python dict

Return type

dict

core.parameters.loads_barcode_dict(file_name)#

Load a barcode-type dictionary from a JSON file

Parameters

file_name (str) – JSON file name

Returns

Barcode dictionary

Return type

dict

core.parameters.print_dict(dictionary: dict)#

Print parameters in your shell terminal

Parameters

dictionary (dict) – Parameters dictionary

core.parameters.set_default(key: str, val)#
core.parameters.warn_default(key, val)#
core.parameters.warn_pop(dico: dict, key: str, default)#

core.pyhim_logging module#

Classes and functions for pyHiM logging

class core.pyhim_logging.Logger(root_folder, parallel=False, session_name='HiM_analysis', init_msg='')#

Bases: object

setup_logger()#
setup_md_file(session_name: str = 'HiM_analysis')#
core.pyhim_logging.print_analyzing_label(text: str)#
core.pyhim_logging.print_dashes()#
core.pyhim_logging.print_framed_text(text: str, frame: str)#

Example: ================= text =================

Parameters
  • text (str) – Text to print in the middle

  • frame (str) – Template of frame to put in right and left Example: “=================”

core.pyhim_logging.print_log(message, status='INFO')#

Show a message in the terminal and log it to a file. Compatible with Dask workers. (Previously relied on the Dask logger, which itself used a logging Logger instance.)

Parameters
  • message (str) – message.

  • status (str, optional) – either DEBUG, INFO or WARN. The default is ‘INFO’.

Return type

None.
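
Usage sketch based on the signature and status values above:

```python
from core.pyhim_logging import print_log

print_log("Starting 3D registration")                      # status defaults to 'INFO'
print_log("Reference fiducial not found", status="WARN")   # warning-level message
```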

core.pyhim_logging.print_section(section: str)#
core.pyhim_logging.print_session_name(name: str)#
core.pyhim_logging.print_title(title: str)#
core.pyhim_logging.print_unknown_params(unknown_params: dict)#
core.pyhim_logging.write_string_to_file(file_name, text_to_output, attribute='a')#

Write a line of text into a file

Parameters
  • file_name (str) – log file

  • text_to_output (str) – text to write in file

  • attribute (str, optional) – Open file mode option, by default “a”

core.run_args module#

pyHiM argument parser module

class core.run_args.RunArgs(command_line_arguments)#

Bases: object

Store and check run arguments

args_to_str()#

Print parameters in your shell terminal

Parameters

dictionary (dict) – Parameters dictionary

static get_2d_commands()#

Default commands for 2D pipeline

Returns

Set of 2D commands

Return type

frozenset

static get_3d_commands()#

Default commands for 3D pipeline

Returns

Set of 3D commands

Return type

frozenset

static get_available_commands()#

Available commands for pyHiM

Returns

Set of available commands

Return type

frozenset

classmethod parse_cmd(cmd)#

Parse the input command list given by the user as a comma-separated string

Parameters

cmd (str) – A comma-separated list of commands

Returns

A Python list of commands

Return type

List[str]
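
A usage sketch; the command names are illustrative and not taken from this page, see get_available_commands() for the real set:

```python
from core.run_args import RunArgs

# Split a comma-separated command string into a Python list of commands.
cmd_list = RunArgs.parse_cmd("project,register_global,register_local")
# expected: ['project', 'register_global', 'register_local']
```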

core.saving module#

Functions for common image processing

core.saving.annotate_heatmap(im, data=None, valfmt='{x:.1f}', textcolors=('black', 'white'), threshold=None, **textkw)#

A function to annotate a heatmap.

Parameters
  • im – The AxesImage to be labeled.

  • data – Data used to annotate. If None, the image’s data is used. Optional.

  • valfmt – The format of the annotations inside the heatmap. This should either use the string format method, e.g. “$ {x:.2f}”, or be a matplotlib.ticker.Formatter. Optional.

  • textcolors – A pair of colors. The first is used for values below a threshold, the second for those above. Optional.

  • threshold – Value in data units according to which the colors from textcolors are applied. If None (the default) uses the middle of the colormap as separation. Optional.

  • **kwargs – All other arguments are forwarded to each call to text used to create the text labels.

core.saving.display_3d_assembled(images, localizations=None, plotting_range=None, normalize_b=True, masks=None)#
core.saving.heatmap(data, row_labels, col_labels, ax=None, cbar_kw=None, cbarlabel='', fontsize=12, **kwargs)#

Create a heatmap from a numpy array and two lists of labels.

Parameters
  • data – A 2D numpy array of shape (N, M).

  • row_labels – A list or array of length N with the labels for the rows.

  • col_labels – A list or array of length M with the labels for the columns.

  • ax – A matplotlib.axes.Axes instance to which the heatmap is plotted. If not provided, use current axes or create a new one. Optional.

  • cbar_kw – A dictionary with arguments to matplotlib.Figure.colorbar. Optional.

  • cbarlabel – The label for the colorbar. Optional.

  • **kwargs – All other arguments are forwarded to imshow.
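
The two helpers above can be combined; since the return value of heatmap is not documented here, this hedged sketch annotates a plain imshow result instead:

```python
import matplotlib.pyplot as plt
import numpy as np

from core.saving import annotate_heatmap

data = np.random.rand(4, 5)
fig, ax = plt.subplots()
im = ax.imshow(data, cmap="YlGn")  # AxesImage to be labeled
annotate_heatmap(im, valfmt="{x:.2f}", textcolors=("black", "white"))
fig.savefig("annotated_heatmap.png")
```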

core.saving.image_show_with_values(matrices, output_name: str = 'tmp.png', cbarlabels: Optional[list[str]] = None, fontsize=6, verbose: bool = False, title='')#

Plots a list of matrices with their values in each pixel.

Parameters
  • matrices (list) – matrices to plot. Should be 2D numpy arrays

  • output_name (str, optional) – Output file name of the figure. The default is “tmp.png”.

  • cbarlabels (list, optional) – titles of subplots. The default is [“focalPlane”].

  • fontsize (float, optional) – fontsize. The default is 6.

  • verbose (bool, optional) – Verbosity flag. The default is False.

  • title (str, optional) – figure title. The default is “”.

Return type

None.
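
Usage sketch based on the signature above; the matrices and colorbar labels are illustrative:

```python
import numpy as np

from core.saving import image_show_with_values

# Two hypothetical 5x5 matrices, e.g. focal-plane maps.
matrices = [np.random.randint(0, 40, size=(5, 5)) for _ in range(2)]
image_show_with_values(matrices, output_name="focal_planes.png",
                       cbarlabels=["focalPlane", "stdDev"], title="example")
```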

core.saving.image_show_with_values_single(ax, matrix, cbarlabel, fontsize, cbar_kw, valfmt='{x:.0f}', cmap='YlGn')#
core.saving.plot_3d_shift_matrices(shift_matrices, fontsize=8, log=False, valfmt='{x:.1f}')#
core.saving.plot_4_images(allimages, titles=None)#
core.saving.plot_raw_images_and_labels(image, label)#

Parameters
  • image (List of numpy ndarray (N-dimensional array)) – 3D raw image of format .tif

  • label (List of numpy ndarray (N-dimensional array)) – 3D labeled image of format .tif

core.saving.save_image_2d_cmd(image, file_name)#
core.saving.save_image_as_blocks(img, full_filename, block_size_xy=256, label='raw_image')#

Module contents#