topostats.grains#

Find grains in an image.

Attributes#

Classes#

GrainCrop

Class for storing the crops of grains.

GrainCropsDirection

Dataclass for storing the crops of grains in a particular imaging direction.

ImageGrainCrops

Dataclass for storing the crops of grains in an image.

Grains

Find grains in an image.

Functions#

validate_full_mask_tensor_shape(...)

Validate the shape of the full mask tensor.

Module Contents#

topostats.grains.LOGGER#
class topostats.grains.GrainCrop(image: numpy.typing.NDArray[numpy.float32], mask: numpy.typing.NDArray[numpy.bool_], padding: int, bbox: tuple[int, int, int, int], pixel_to_nm_scaling: float, filename: str)[source]#

Class for storing the crops of grains.

Parameters:
  • image (npt.NDArray[np.float32]) – 2-D Numpy array of the cropped image.

  • mask (npt.NDArray[np.bool_]) – 3-D Numpy tensor of the cropped mask.

  • padding (int) – Padding added to the bounding box of the grain during cropping.

  • bbox (tuple[int, int, int, int]) – Bounding box of the crop including padding.

  • pixel_to_nm_scaling (float) – Pixel to nanometre scaling factor for the crop.

  • filename (str) – Filename of the image from which the crop was taken.
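A minimal construction sketch. GrainCrop instances are normally created for you by Grains during grain finding; the array shapes, scaling factor and filename below are illustrative assumptions, and GrainCrop performs its own validation (for example the bounding box must be square), so exact values may need adjusting.

import numpy as np

from topostats.grains import GrainCrop

# Illustrative 8x8 crop with a two-class mask (channel 0 = background, channel 1 = grain).
crop_image = np.random.rand(8, 8).astype(np.float32)
crop_mask = np.zeros((8, 8, 2), dtype=np.bool_)
crop_mask[2:6, 2:6, 1] = True           # a small square "grain"
crop_mask[..., 0] = ~crop_mask[..., 1]  # background is everything else

grain_crop = GrainCrop(
    image=crop_image,
    mask=crop_mask,
    padding=1,                # padding that was added around the grain when cropping
    bbox=(0, 0, 8, 8),        # square bounding box of the crop, including padding
    pixel_to_nm_scaling=0.5,  # assumed nm-per-pixel value
    filename="example_scan",  # assumed source filename
)
print(grain_crop.grain_crop_to_dict().keys())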

property padding: int#

Getter for the padding.

Returns:

The padding amount.

Return type:

int

property image: numpy.typing.NDArray[numpy.float32]#

Getter for the image.

Returns:

Numpy array of the image.

Return type:

npt.NDArray

property mask: numpy.typing.NDArray[numpy.bool_]#

Getter for the mask.

Returns:

Numpy array of the mask.

Return type:

npt.NDArray[np.bool_]

property bbox: tuple[int, int, int, int]#

Getter for the bounding box.

Returns:

Bounding box of the crop.

Return type:

tuple

Raises:

ValueError – If the bounding box is not square.

property pixel_to_nm_scaling: float#

Getter for the pixel to nanometre scaling factor.

Returns:

Pixel to nanometre scaling factor.

Return type:

float

property filename: str#

Getter for the filename.

Returns:

The image filename.

Return type:

str

__eq__(other: object) bool[source]#

Check if two GrainCrop objects are equal.

Parameters:

other (object) – Object to compare to.

Returns:

True if the objects are equal, False otherwise.

Return type:

bool

grain_crop_to_dict() dict[str, Any][source]#

Convert GrainCrop to dictionary indexed by attributes.

Returns:

Dictionary indexed by attribute of the grain attributes.

Return type:

dict[str, Any]

debug_locate_difference(other: object) None[source]#

Debug function to find the culprit when two GrainCrop objects are not equal.

Parameters:

other (object) – Object to compare to.

Raises:

ValueError – If the objects are not equal.

topostats.grains.validate_full_mask_tensor_shape(array: numpy.typing.NDArray[numpy.bool_]) numpy.typing.NDArray[numpy.bool_][source]#

Validate the shape of the full mask tensor.

Parameters:

array (npt.NDArray) – Numpy array to validate.

Returns:

Numpy array if valid.

Return type:

npt.NDArray
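A short usage sketch, assuming an NxNx3 boolean tensor (the shape described for GrainCropsDirection.full_mask_tensor below); invalid shapes are assumed to be rejected.

import numpy as np

from topostats.grains import validate_full_mask_tensor_shape

# Assumed 16x16 full mask tensor with three class channels (background plus two classes).
full_mask = np.zeros((16, 16, 3), dtype=np.bool_)
full_mask[..., 0] = True  # everything is background
validated = validate_full_mask_tensor_shape(full_mask)  # returned unchanged when the shape is valid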

class topostats.grains.GrainCropsDirection[source]#

Dataclass for storing the crops of grains in a particular imaging direction.

full_mask_tensor#

Boolean NxNx3 array of the full mask tensor.

Type:

npt.NDArray[np.bool_]

crops#

Grain crops.

Type:

dict[int, GrainCrop]

crops: dict[int, GrainCrop]#
property full_mask_tensor: numpy.typing.NDArray[numpy.bool_]#

Getter for the full mask tensor.

Returns:

Numpy array of the full mask tensor.

Return type:

npt.NDArray

__post_init__()[source]#

Validate the full mask tensor shape.

Raises:

ValueError – If the full mask tensor shape is invalid.

__eq__(other: object) bool[source]#

Check if two GrainCropsDirection objects are equal.

Parameters:

other (object) – Object to compare to.

Returns:

True if the objects are equal, False otherwise.

Return type:

bool

grain_crops_direction_to_dict() dict[str, numpy.typing.NDArray[numpy.bool_] | dict[str:Any]][source]#

Convert GrainCropsDirection to dictionary indexed by attributes.

Returns:

Dictionary indexed by attribute of the grain attributes.

Return type:

dict[str, Any]

debug_locate_difference(other: object) None[source]#

Debug function to find the culprit when two GrainCropsDirection objects are not equal.

Parameters:

other (object) – Object to compare to.

Raises:

ValueError – If the objects are not equal.

update_full_mask_tensor()[source]#

Update the full mask tensor from the grain crops.

class topostats.grains.ImageGrainCrops[source]#

Dataclass for storing the crops of grains in an image.

above#

Grains in the above direction.

Type:

GrainCropsDirection | None

below#

Grains in the below direction.

Type:

GrainCropsDirection | None

above: GrainCropsDirection | None#
below: GrainCropsDirection | None#
__eq__(other: object) bool[source]#

Check if two ImageGrainCrops objects are equal.

Parameters:

other (object) – Object to compare to.

Returns:

True if the objects are equal, False otherwise.

Return type:

bool

image_grain_crops_to_dict() dict[str, numpy.typing.NDArray[numpy.bool_] | dict[str:Any]][source]#

Convert ImageGrainCrops to dictionary indexed by attributes.

Returns:

Dictionary indexed by attribute of the grain attributes.

Return type:

dict[str, Any]

debug_locate_difference(other: object) None[source]#

Debug function to find the culprit when two ImageGrainCrops objects are not equal.

Parameters:

other (object) – Object to compare to.

Raises:

ValueError – If the objects are not equal.

class topostats.grains.Grains(image: numpy.typing.NDArray, filename: str, pixel_to_nm_scaling: float, grain_crop_padding: int = 1, unet_config: dict[str, str | int | float | tuple[int | None, int, int, int] | None] | None = None, threshold_method: str | None = None, otsu_threshold_multiplier: float | None = None, threshold_std_dev: dict | None = None, threshold_absolute: dict | None = None, absolute_area_threshold: dict | None = None, direction: str | None = None, smallest_grain_size_nm2: float | None = None, remove_edge_intersecting_grains: bool = True, classes_to_merge: list[tuple[int, int]] | None = None, vetting: dict | None = None)[source]#

Find grains in an image.

Parameters:
  • image (npt.NDArray) – 2-D Numpy array of image.

  • filename (str) – File being processed (used in logging).

  • pixel_to_nm_scaling (float) – Scaling of pixels to nanometres.

  • grain_crop_padding (int) – Padding to add to the bounding box of the grain during cropping.

  • unet_config (dict[str, str | int | float | tuple[int | None, int, int, int] | None]) –

    Configuration for the UNet model.

    model_path: str – Path to the UNet model.

    upper_norm_bound: float – Upper bound for normalising the image.

    lower_norm_bound: float – Lower bound for normalising the image.

  • threshold_method (str) – Method for determining threshold to mask values, default is ‘otsu’.

  • otsu_threshold_multiplier (float) – Factor by which the below threshold is to be scaled prior to masking.

  • threshold_std_dev (dict) – Dictionary of ‘below’ and ‘above’ factors by which standard deviation is multiplied to derive the threshold if threshold_method is ‘std_dev’.

  • threshold_absolute (dict) – Dictionary of absolute ‘below’ and ‘above’ thresholds for grain finding.

  • absolute_area_threshold (dict) – Dictionary of ‘above’ and ‘below’ grain area thresholds.

  • direction (str) – Direction for which grains are to be detected, valid values are ‘above’, ‘below’ and ‘both’.

  • smallest_grain_size_nm2 (float) – Smallest grain size in nanometres squared; objects smaller than this are removed as noise.

  • remove_edge_intersecting_grains (bool) – Whether or not to remove grains that intersect the edge of the image.

  • classes_to_merge (list[tuple[int, int]] | None) – List of tuples of classes to merge.

  • vetting (dict | None) – Dictionary of vetting parameters.
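A minimal end-to-end sketch of grain finding. The synthetic image, threshold settings and scaling value are illustrative assumptions (the accepted keys follow the TopoStats configuration), and depending on the version further options such as unet_config may also need to be provided.

import numpy as np

from topostats.grains import Grains

# Assumed flattened AFM height map with a single raised square feature.
image = np.zeros((128, 128), dtype=np.float32)
image[40:80, 40:80] = 3.0

grains = Grains(
    image=image,
    filename="example_scan",    # used only in logging
    pixel_to_nm_scaling=1.0,    # assumed nm-per-pixel value
    threshold_method="std_dev",
    threshold_std_dev={"above": 1.0, "below": 10.0},  # assumed multipliers
    absolute_area_threshold={"above": [10, 10000], "below": [10, 10000]},  # assumed nm^2 bounds
    direction="above",
    smallest_grain_size_nm2=10.0,  # assumed noise threshold
)
grains.find_grains()

image_grain_crops = grains.image_grain_crops
if image_grain_crops.above is not None:
    print(f"found {len(image_grain_crops.above.crops)} grain(s)")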

image#
filename#
pixel_to_nm_scaling#
threshold_method = None#
otsu_threshold_multiplier = None#
threshold_std_dev = None#
threshold_absolute = None#
absolute_area_threshold = None#
direction#
smallest_grain_size_nm2 = None#
remove_edge_intersecting_grains = True#
thresholds: dict[str, float] | None = None#
images#
directions#
minimum_grain_size = None#
region_properties#
bounding_boxes#
grainstats = None#
grain_crop_padding = 1#
unet_config = None#
vetting = None#
classes_to_merge = None#
minimum_grain_size_px = 10#
minimum_bbox_size_px = 5#
image_grain_crops#
tidy_border(image: numpy.typing.NDArray, **kwargs) numpy.typing.NDArray[source]#

Remove grains touching the border.

Parameters:
  • image (npt.NDArray) – 2-D Numpy array representing the image.

  • **kwargs – Arguments passed to ‘skimage.segmentation.clear_border(**kwargs)’.

Returns:

2-D Numpy array of image without objects touching the border.

Return type:

npt.NDArray

static label_regions(image: numpy.typing.NDArray, background: int = 0) numpy.typing.NDArray[source]#

Label regions.

This method is used twice: once prior to removal of small regions and again afterwards, which is why an image must be supplied rather than using ‘self’.

Parameters:
  • image (npt.NDArray) – 2-D Numpy array of image.

  • background (int) – Value used to indicate background of image. Default = 0.

Returns:

2-D Numpy array of image with regions numbered.

Return type:

npt.NDArray

calc_minimum_grain_size(image: numpy.typing.NDArray) float[source]#

Calculate the minimum grain size in pixels squared.

Very small objects are first removed via thresholding before calculating the below extreme.

Parameters:

image (npt.NDArray) – 2-D Numpy image from which to calculate the minimum grain size.

Returns:

Minimum grain size in pixels squared. If there are no areas, a value of -1 is returned.

Return type:

float

remove_noise(image: numpy.typing.NDArray, **kwargs) numpy.typing.NDArray[source]#

Remove noise, i.e. objects smaller than ‘smallest_grain_size_nm2’.

This ensures that the smallest objects (~1 px) are removed regardless of the size distribution of the grains.

Parameters:
  • image (npt.NDArray) – 2-D Numpy array to be cleaned.

  • **kwargs – Arguments passed to ‘skimage.morphology.remove_small_objects(**kwargs)’.

Returns:

2-D Numpy array of image with objects < smallest_grain_size_nm2 removed.

Return type:

npt.NDArray

remove_small_objects(image: numpy.array, **kwargs) numpy.typing.NDArray[source]#

Remove small objects from the input image.

The threshold is determined by the minimum grain size, in pixels squared, set during class initialisation.

Parameters:
  • image (np.array) – 2-D Numpy array to remove small objects from.

  • **kwargs – Arguments passed to ‘skimage.morphology.remove_small_objects(**kwargs)’.

Returns:

2-D Numpy array of image with objects < minimum_grain_size removed.

Return type:

npt.NDArray

remove_objects_too_small_to_process(image: numpy.typing.NDArray, minimum_size_px: int, minimum_bbox_size_px: int) numpy.typing.NDArray[numpy.bool_][source]#

Remove objects whose dimensions in pixels are too small to process.

Parameters:
  • image (npt.NDArray) – 2-D Numpy array of image.

  • minimum_size_px (int) – Minimum number of pixels for an object.

  • minimum_bbox_size_px (int) – Limit for the minimum dimension of an object in pixels. E.g. 5 means the object’s bounding box must be at least 5x5.

Returns:

2-D Numpy array of image with objects removed that are too small to process.

Return type:

npt.NDArray

area_thresholding(image: numpy.typing.NDArray, area_thresholds: tuple) numpy.typing.NDArray[source]#

Remove objects larger and smaller than the specified thresholds.

Parameters:
  • image (npt.NDArray) – Image array where the background == 0 and grains are labelled as integers >0.

  • area_thresholds (tuple) – Tuple of area thresholds (in nanometres squared, not pixels squared); the first is the lower size limit, the second the upper.

Returns:

Array with small and large objects removed.

Return type:

npt.NDArray
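Because the thresholds are given in nanometres squared while labelled regions are measured in pixels, a conversion is needed; a sketch of the arithmetic, assuming areas scale with the square of the pixel-to-nanometre factor.

# Assumed scaling of 0.5 nm per pixel, so one pixel covers 0.25 nm^2.
pixel_to_nm_scaling = 0.5

lower_nm2, upper_nm2 = 200.0, 30000.0           # illustrative area thresholds in nm^2
lower_px = lower_nm2 / pixel_to_nm_scaling**2   # 800 px^2
upper_px = upper_nm2 / pixel_to_nm_scaling**2   # 120000 px^2

# A labelled region would be kept only if its pixel area falls between the two limits.
print(lower_px, upper_px)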

colour_regions(image: numpy.typing.NDArray, **kwargs) numpy.typing.NDArray[source]#

Colour the regions.

Parameters:
  • image (npt.NDArray) – 2-D array of labelled regions to be coloured.

  • **kwargs – Arguments passed to ‘skimage.color.label2rgb(**kwargs)’.

Returns:

Numpy array of image with objects coloured.

Return type:

np.array

static get_region_properties(image: numpy.array, **kwargs) list[source]#

Extract the properties of each region.

Parameters:
  • image (np.array) – Numpy array representing image.

  • **kwargs – Arguments passed to ‘skimage.measure.regionprops(**kwargs)’.

Returns:

List of region property objects.

Return type:

list
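Both label_regions and get_region_properties are static, so they can be used on their own; a small sketch with an assumed binary mask.

import numpy as np

from topostats.grains import Grains

# Assumed binary mask containing two separate objects.
mask = np.zeros((32, 32), dtype=bool)
mask[2:6, 2:6] = True
mask[20:30, 20:30] = True

labelled = Grains.label_regions(mask)             # background stays 0, objects numbered 1, 2, ...
regions = Grains.get_region_properties(labelled)  # skimage.measure regionprops objects
for region in regions:
    print(region.label, region.area, region.bbox)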

get_bounding_boxes(direction: str) dict[source]#

Derive a list of bounding boxes for each region from the derived region_properties.

Parameters:

direction (str) – Direction of threshold for which bounding boxes are being calculated.

Returns:

Dictionary of bounding boxes indexed by region area.

Return type:

dict

find_grains() None[source]#

Find grains.

static improve_grain_segmentation_unet(graincrops: dict[int, GrainCrop], filename: str, direction: str, unet_config: dict[str, str | int | float | tuple[int | None, int, int, int] | None]) dict[int, GrainCrop][source]#

Use a UNet model to re-segment existing grains to improve their accuracy.

Parameters:
  • graincrops (dict[int, GrainCrop]) – Dictionary of grain crops.

  • filename (str) – File being processed (used in logging).

  • direction (str) – Direction of threshold for which bounding boxes are being calculated.

  • unet_config (dict[str, str | int | float | tuple[int | None, int, int, int] | None]) –

    Configuration for the UNet model.

    model_path: str – Path to the UNet model.

    grain_crop_padding: int – Padding to add to the bounding box of the grain before cropping.

    upper_norm_bound: float – Upper bound for normalising the image.

    lower_norm_bound: float – Lower bound for normalising the image.

Returns:

Dictionary of (hopefully) improved grain crops.

Return type:

dict[int, GrainCrop]

static keep_largest_labelled_region(labelled_image: numpy.typing.NDArray[numpy.int32]) numpy.typing.NDArray[numpy.bool_][source]#

Keep only the largest region in a labelled image.

Parameters:

labelled_image (npt.NDArray) – 2-D Numpy array of labelled regions.

Returns:

2-D Numpy boolean array of labelled regions with only the largest region.

Return type:

npt.NDArray

static flatten_multi_class_tensor(grain_mask_tensor: numpy.typing.NDArray) numpy.typing.NDArray[source]#

Flatten a multi-class image tensor to a single binary mask.

The returned tensor is of boolean type in case there are multiple hits in the same pixel. We don’t want values of 2, 3, etc. because they would cause issues in labelling and produce erroneous grains within grains.

Parameters:

grain_mask_tensor (npt.NDArray) – Multi-class grain mask tensor of shape (N, N, C).

Returns:

Combined binary mask of all but the background class (:, :, 0).

Return type:

npt.NDArray
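A sketch of what the flattening amounts to on an assumed (N, N, C) tensor: per the description above, it is a logical OR across every non-background channel.

import numpy as np

from topostats.grains import Grains

# Assumed 16x16 tensor with three channels: background, class 1 and class 2.
grain_mask_tensor = np.zeros((16, 16, 3), dtype=np.bool_)
grain_mask_tensor[2:6, 2:6, 1] = True
grain_mask_tensor[8:12, 8:12, 2] = True
grain_mask_tensor[..., 0] = ~np.any(grain_mask_tensor[..., 1:], axis=-1)

flat = Grains.flatten_multi_class_tensor(grain_mask_tensor)
# Expected to match an OR over all channels except the background (channel 0).
print(np.array_equal(flat, np.any(grain_mask_tensor[..., 1:], axis=-1)))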

static get_multi_class_grain_bounding_boxes(grain_mask_tensor: numpy.typing.NDArray) dict[source]#

Get the bounding boxes for each grain in a multi-class image tensor.

Finds the bounding boxes for each grain in a multi-class image tensor. Grains can span multiple classes, so the bounding boxes are found for the combined binary mask of contiguous grains across all classes.

Parameters:

grain_mask_tensor (npt.NDArray) – 3-D Numpy array of grain mask tensor.

Returns:

Dictionary of bounding boxes indexed by grain number.

Return type:

dict

static update_background_class(grain_mask_tensor: numpy.typing.NDArray) numpy.typing.NDArray[numpy.bool_][source]#

Update the background class to reflect the other classes.

Parameters:

grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

Returns:

3-D Numpy array of image tensor with updated background class.

Return type:

npt.NDArray
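A sketch of recomputing a stale background channel; the background is assumed to become the complement of the union of the other classes.

import numpy as np

from topostats.grains import Grains

# Assumed tensor where class 1 has been edited but channel 0 still marks everything as background.
grain_mask_tensor = np.zeros((16, 16, 3), dtype=np.bool_)
grain_mask_tensor[4:10, 4:10, 1] = True
grain_mask_tensor[..., 0] = True  # stale background

updated = Grains.update_background_class(grain_mask_tensor)
# Background should now be False wherever any other class is True (assumption).
print(updated[4:10, 4:10, 0].any())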

static vet_class_sizes_single_grain(single_grain_mask_tensor: numpy.typing.NDArray, pixel_to_nm_scaling: float, class_size_thresholds: list[tuple[int, int, int]] | None) tuple[numpy.typing.NDArray, bool][source]#

Remove regions of particular classes based on size thresholds.

Regions of classes that are too large or small may need to be removed for many reasons (e.g. removing noise erroneously detected by the model, or larger-than-expected molecules that are obviously erroneous); this method allows for the removal of these regions based on size thresholds.

Parameters:
  • single_grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the mask tensor.

  • pixel_to_nm_scaling (float) – Scaling of pixels to nanometres.

  • class_size_thresholds (list[tuple[int, int, int]] | None) – List of class size thresholds. Structure is [(class_index, lower, upper)].

Returns:

  • npt.NDArray – 3-D Numpy array of the mask tensor with grains removed based on size thresholds.

  • bool – True if the grain passes the vetting, False if it fails.

static get_individual_grain_crops(grain_mask_tensor: numpy.typing.NDArray, padding: int = 1) tuple[list[numpy.typing.NDArray], list[numpy.typing.NDArray], int][source]#

Get individual grain crops from an image tensor.

Fetches individual grain crops from an image tensor, but zeros any non-connected grains in the crop region. This is to ensure that other grains do not affect further processing steps.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of image tensor.

  • padding (int) – Padding to add to the bounding box of the grain before cropping. Default is 1.

Returns:

  • list[npt.NDArray] – List of individual grain crops.

  • list[npt.NDArray] – List of bounding boxes for each grain.

  • int – Padding used for the bounding boxes.
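A usage sketch of the three return values, with an assumed two-grain tensor.

import numpy as np

from topostats.grains import Grains

# Assumed tensor containing two separate grains in class 1.
grain_mask_tensor = np.zeros((32, 32, 2), dtype=np.bool_)
grain_mask_tensor[2:8, 2:8, 1] = True
grain_mask_tensor[20:28, 20:28, 1] = True
grain_mask_tensor[..., 0] = ~grain_mask_tensor[..., 1]

crops, bounding_boxes, padding = Grains.get_individual_grain_crops(grain_mask_tensor, padding=2)
for crop, bbox in zip(crops, bounding_boxes):
    print(crop.shape, bbox)  # one padded crop tensor and bounding box per grain
print(f"padding used: {padding}")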

static vet_numbers_of_regions_single_grain(grain_mask_tensor: numpy.typing.NDArray, class_region_number_thresholds: list[tuple[int, int, int]] | None) tuple[numpy.typing.NDArray, bool][source]#

Check if the number of regions of different classes for a single grain is within thresholds.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor, should be of only one grain.

  • class_region_number_thresholds (list[tuple[int, int, int]]) – List of class region number thresholds. Structure is [(class_index, lower, upper)].

Returns:

  • npt.NDArray – 3-D Numpy array of the grain mask tensor with grains removed based on region number thresholds.

  • bool – True if the grain passes the vetting, False if it fails.

static convert_classes_to_nearby_classes(grain_mask_tensor: numpy.typing.NDArray, classes_to_convert: list[tuple[int, int]] | None, class_touching_threshold: int = 1) numpy.typing.NDArray[source]#

Convert all but the largest regions of one class into another class provided the former touches the latter.

Specifically, it takes a list of tuples of two integers (dubbed class A and class B). For each class A, class B pair, it will find the largest region of class A and flag it to be ignored. Then for each non-largest region of class A, it will check if it touches any class B region (within the class_touching_threshold distance). If it does, it will convert the region to class B.

This is useful for situations where you want just one region of class A and the model has a habit of producing small regions of class A interspersed in the class B regions, which should be class B instead.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

  • classes_to_convert (list) – List of tuples of classes to convert. Structure is [(class_a, class_b)].

  • class_touching_threshold (int) – Number of dilation passes to do to determine class A connectivity with class B.

Returns:

3-D Numpy array of the grain mask tensor with classes converted.

Return type:

npt.NDArray
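A call sketch; the class pairs, region placement and touching threshold are illustrative.

import numpy as np

from topostats.grains import Grains

# Assumed three-class tensor: a large class-1 region, a stray class-2 blob touching it,
# and a larger class-2 region elsewhere that remains the largest of its class.
grain_mask_tensor = np.zeros((32, 32, 3), dtype=np.bool_)
grain_mask_tensor[4:28, 4:16, 1] = True
grain_mask_tensor[10:12, 16:18, 2] = True
grain_mask_tensor[20:28, 20:28, 2] = True
grain_mask_tensor[..., 0] = ~np.any(grain_mask_tensor[..., 1:], axis=-1)

converted = Grains.convert_classes_to_nearby_classes(
    grain_mask_tensor,
    classes_to_convert=[(2, 1)],  # convert non-largest class-2 regions that touch class 1 into class 1
    class_touching_threshold=1,   # one dilation pass to test adjacency
)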

static keep_largest_labelled_region_classes(single_grain_mask_tensor: numpy.typing.NDArray, keep_largest_labelled_regions_classes: list[int] | None) numpy.typing.NDArray[source]#

Keep only the largest region in specific classes.

Parameters:
  • single_grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

  • keep_largest_labelled_regions_classes (list[int]) – List of classes to keep only the largest region.

Returns:

3-D Numpy array of the grain mask tensor with only the largest regions in specific classes.

Return type:

npt.NDArray

static calculate_region_connection_regions(grain_mask_tensor: numpy.typing.NDArray, classes: tuple[int, int]) tuple[int, numpy.typing.NDArray, dict[int, numpy.typing.NDArray[int]]][source]#

Get a list of connection regions between two classes.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

  • classes (tuple[int, int]) – Tuple pair of classes to calculate the connection regions.

Returns:

  • int – Number of connection regions.

  • npt.NDArray – 2-D Numpy array of the intersection labels.

  • dict – Dictionary of connection points indexed by region label.

static vet_class_connection_points(grain_mask_tensor: numpy.typing.NDArray, class_connection_point_thresholds: list[tuple[tuple[int, int], tuple[int, int]]] | None) bool[source]#

Vet the number of connection points between regions in specific classes.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

  • class_connection_point_thresholds (list[tuple[tuple[int, int], tuple[int, int]]] | None) – List of tuples of classes and connection point thresholds. Structure is [(class_pair, (lower, upper))].

Returns:

True if the grain passes the vetting, False if it fails.

Return type:

bool

static assemble_grain_mask_tensor_from_crops(grain_mask_tensor_shape: tuple[int, int, int], grain_crops_and_bounding_boxes: list[dict[str, numpy.typing.NDArray]]) numpy.typing.NDArray[source]#

Combine individual grain crops into a single grain mask tensor.

Parameters:
  • grain_mask_tensor_shape (tuple) – Shape of the grain mask tensor.

  • grain_crops_and_bounding_boxes (list) – List of dictionaries containing the grain crops and bounding boxes. Structure: [{“grain_tensor”: npt.NDArray, “bounding_box”: tuple, “padding”: int}].

Returns:

3-D Numpy array of the grain mask tensor.

Return type:

npt.NDArray
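A sketch of the expected input structure; the crop shape and bounding box are assumptions.

import numpy as np

from topostats.grains import Grains

# Assumed single 10x10 two-class crop placed back into a 64x64 full tensor.
crop_tensor = np.zeros((10, 10, 2), dtype=np.bool_)
crop_tensor[2:8, 2:8, 1] = True
crop_tensor[..., 0] = ~crop_tensor[..., 1]

grain_crops_and_bounding_boxes = [
    {
        "grain_tensor": crop_tensor,
        "bounding_box": (5, 5, 15, 15),  # where the (padded) crop sits in the full image
        "padding": 1,
    }
]

full_tensor = Grains.assemble_grain_mask_tensor_from_crops(
    grain_mask_tensor_shape=(64, 64, 2),
    grain_crops_and_bounding_boxes=grain_crops_and_bounding_boxes,
)
print(full_tensor.shape)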

static convert_classes_when_too_big_or_small(grain_mask_tensor: numpy.typing.NDArray, pixel_to_nm_scaling: float, class_conversion_size_thresholds: list[tuple[tuple[int, int, int], tuple[int, int]]] | None) numpy.typing.NDArray[source]#

Convert classes when they are too big or too small based on size thresholds.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

  • pixel_to_nm_scaling (float) – Scaling of pixels to nanometres.

  • class_conversion_size_thresholds (list) – List of class conversion size thresholds. Structure is [(class_index, class_to_convert_to_if_too_small, class_to_convert_to_if_too_big), (lower_threshold, upper_threshold)].

Returns:

3-D Numpy array of the grain mask tensor with classes converted based on size thresholds.

Return type:

npt.NDArray

static vet_grains(graincrops: dict[int, GrainCrop], class_conversion_size_thresholds: list[tuple[tuple[int, int, int], tuple[int, int]]] | None, class_size_thresholds: list[tuple[int, int, int]] | None, class_region_number_thresholds: list[tuple[int, int, int]] | None, nearby_conversion_classes_to_convert: list[tuple[int, int]] | None, class_touching_threshold: int, keep_largest_labelled_regions_classes: list[int] | None, class_connection_point_thresholds: list[tuple[tuple[int, int], tuple[int, int]]] | None) dict[int, GrainCrop][source]#

Vet grains in a grain mask tensor based on a variety of criteria.

Parameters:
  • graincrops (dict[int, GrainCrop]) – Dictionary of grain crops.

  • class_conversion_size_thresholds (list) – List of class conversion size thresholds. Structure is [(class_index, class_to_convert_to_if_too_small, class_to_convert_to_if_too_big), (lower_threshold, upper_threshold)].

  • class_size_thresholds (list) – List of class size thresholds. Structure is [(class_index, lower, upper)].

  • class_region_number_thresholds (list) – List of class region number thresholds. Structure is [(class_index, lower, upper)].

  • nearby_conversion_classes_to_convert (list) – List of tuples of classes to convert. Structure is [(class_a, class_b)].

  • class_touching_threshold (int) – Number of dilation passes to do to determine class A connectivity with class B.

  • keep_largest_labelled_regions_classes (list) – List of classes to keep only the largest region.

  • class_connection_point_thresholds (list) – List of tuples of classes and connection point thresholds. Structure is [(class_pair, (lower, upper))].

Returns:

Dictionary of grain crops that passed the vetting.

Return type:

dict[int, GrainCrop]
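A call sketch showing how the vetting criteria slot together; the single GrainCrop and every threshold value are illustrative, and criteria passed as None are assumed to be skipped.

import numpy as np

from topostats.grains import GrainCrop, Grains

# Assumed single grain crop (see the GrainCrop sketch above).
mask = np.zeros((12, 12, 2), dtype=np.bool_)
mask[3:9, 3:9, 1] = True
mask[..., 0] = ~mask[..., 1]
graincrops = {
    0: GrainCrop(
        image=np.random.rand(12, 12).astype(np.float32),
        mask=mask,
        padding=1,
        bbox=(0, 0, 12, 12),
        pixel_to_nm_scaling=1.0,
        filename="example_scan",
    )
}

vetted = Grains.vet_grains(
    graincrops=graincrops,
    class_conversion_size_thresholds=None,
    class_size_thresholds=[(1, 10, 50000)],      # keep grains whose class-1 area lies between the bounds (assumed nm^2)
    class_region_number_thresholds=[(1, 1, 1)],  # require exactly one class-1 region per grain
    nearby_conversion_classes_to_convert=None,
    class_touching_threshold=1,
    keep_largest_labelled_regions_classes=None,
    class_connection_point_thresholds=None,
)
print(len(vetted))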

static merge_classes(grain_mask_tensor: numpy.typing.NDArray, classes_to_merge: list[tuple[int]] | None) numpy.typing.NDArray[source]#

Merge classes in a grain mask tensor and add them to the grain tensor.

Parameters:
  • grain_mask_tensor (npt.NDArray) – 3-D Numpy array of the grain mask tensor.

  • classes_to_merge (list | None) – List of tuples for classes to merge, can be any number of classes.

Returns:

3-D Numpy array of the grain mask tensor with classes merged.

Return type:

npt.NDArray
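A sketch of merging two classes; the merged mask is assumed to be appended to the tensor as an additional class.

import numpy as np

from topostats.grains import Grains

# Assumed three-class tensor (background, class 1, class 2).
grain_mask_tensor = np.zeros((16, 16, 3), dtype=np.bool_)
grain_mask_tensor[2:6, 2:6, 1] = True
grain_mask_tensor[6:10, 6:10, 2] = True
grain_mask_tensor[..., 0] = ~np.any(grain_mask_tensor[..., 1:], axis=-1)

merged = Grains.merge_classes(grain_mask_tensor, classes_to_merge=[(1, 2)])
print(grain_mask_tensor.shape, "->", merged.shape)  # one extra channel expected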

static construct_full_mask_from_graincrops(graincrops: dict[int, GrainCrop], image_shape: tuple[int, int]) numpy.typing.NDArray[numpy.bool_][source]#

Construct a full mask tensor from the grain crops.

Parameters:
  • graincrops (dict[int, GrainCrop]) – Dictionary of grain crops.

  • image_shape (tuple[int, int]) – Shape of the original image.

Returns:

NxNxC Numpy array of the full mask tensor.

Return type:

npt.NDArray[np.bool_]

static extract_grains_from_full_image_tensor(image: numpy.typing.NDArray[numpy.float32], full_mask_tensor: numpy.typing.NDArray[numpy.bool_], padding: int, pixel_to_nm_scaling: float, filename: str) dict[int, GrainCrop][source]#

Extract grains from the full image mask tensor.

Grains are detected using connected components across all classes in the full mask tensor.

Parameters:
  • image (npt.NDArray[np.float32]) – 2-D Numpy array of the image.

  • full_mask_tensor (npt.NDArray[np.bool_]) – 3-D NxNxC boolean numpy array of all the class masks for the image.

  • padding (int) – Padding added to the bounding box of the grain before cropping.

  • pixel_to_nm_scaling (float) – Pixel to nanometre scaling factor.

  • filename (str) – Filename of the image.

Returns:

Dictionary of grain crops.

Return type:

dict[int, GrainCrop]

static graincrops_remove_objects_too_small_to_process(graincrops: dict[int, GrainCrop], min_object_size: int, min_object_bbox_size: int) dict[int, GrainCrop][source]#

Remove objects that are too small to process from each class of the grain crops.

Parameters:
  • graincrops (dict[int, GrainCrop]) – Dictionary of grain crops.

  • min_object_size (int) – Minimum object size to keep (pixels).

  • min_object_bbox_size (int) – Minimum object bounding box size to keep (pixels^2).

Returns:

Dictionary of grain crops with objects too small to process removed.

Return type:

dict[int, GrainCrop]

static graincrops_merge_classes(graincrops: dict[int, GrainCrop], classes_to_merge: list[tuple[int]] | None) dict[int, GrainCrop][source]#

Merge classes in the grain crops.

Parameters:
  • graincrops (dict[int, GrainCrop]) – Dictionary of grain crops.

  • classes_to_merge (list | None) – List of tuples for classes to merge, can be any number of classes.

Returns:

Dictionary of grain crops with classes merged.

Return type:

dict[int, GrainCrop]

static graincrops_update_background_class(graincrops: dict[int, GrainCrop]) dict[int, GrainCrop][source]#

Update the background class in the grain crops.

Parameters:

graincrops (dict[int, GrainCrop]) – Dictionary of grain crops.

Returns:

Dictionary of grain crops with updated background class.

Return type:

dict[int, GrainCrop]

static remove_disconnected_grains(original_grain_tensor: numpy.typing.NDArray, predicted_grain_tensor: numpy.typing.NDArray)[source]#

Remove grains that are not connected to the original grains.

Parameters:
  • original_grain_tensor (npt.NDArray) – 3-D Numpy array of the original grain tensor.

  • predicted_grain_tensor (npt.NDArray) – 3-D Numpy array of the predicted grain tensor.

Returns:

3-D Numpy array of the predicted grain tensor with grains not connected to the original grains removed.

Return type:

npt.NDArray