Module qute.transforms.norm
Normalization and scaling transforms.
Classes
class ClippedZNormalize (mean: float, std: float, min_clip: float, max_clip: float, in_place: bool = True)
Standardize the passed tensor by subtracting the mean and dividing by the standard deviation.
Constructor
Source code

class ClippedZNormalize(Transform):
    """Standardize the passed tensor by subtracting the mean and dividing by the standard deviation."""

    def __init__(
        self,
        mean: float,
        std: float,
        min_clip: float,
        max_clip: float,
        in_place: bool = True,
    ) -> None:
        """Constructor"""
        super().__init__()
        self.mean = mean
        self.std = np.max([std, 1e-8])
        self.min_clip = min_clip
        self.max_clip = max_clip
        self.in_place = in_place

    def __call__(self, data: torch.Tensor) -> torch.Tensor:
        """
        Apply the transform to the passed tensor.

        Returns
        -------

        data: torch.Tensor
            Normalized tensor.
        """
        if not self.in_place:
            if isinstance(data, torch.Tensor):
                data = data.clone()
            elif isinstance(data, np.ndarray):
                data = data.copy()
            else:
                raise TypeError(
                    "Unsupported data type. Data should be a PyTorch Tensor or a NumPy array."
                )
        tmp = torch.clip(data, self.min_clip, self.max_clip)
        tmp = (tmp - self.mean) / self.std
        return tmp
Ancestors
- monai.transforms.transform.Transform
- abc.ABC
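A minimal usage sketch; the intensity statistics, clipping bounds, and tensor shape below are illustrative placeholders, not values recommended by the library:

import torch

from qute.transforms.norm import ClippedZNormalize

# Hypothetical intensity statistics estimated on a training set
transform = ClippedZNormalize(
    mean=120.0, std=35.0, min_clip=0.0, max_clip=255.0, in_place=False
)

img = 255.0 * torch.rand(1, 256, 256)  # single-channel image with values in [0, 255]
norm = transform(img)  # values are clipped to [min_clip, max_clip], then standardized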
class ClippedZNormalized (mean: float, std: float, min_clip: float, max_clip: float, image_key: str = 'image')
Standardize the "image" tensor by subtracting the mean and dividing by the standard deviation.
Constructor
Source code

class ClippedZNormalized(MapTransform):
    """Standardize the "image" tensor by subtracting the mean and dividing by the standard deviation."""

    def __init__(
        self,
        mean: float,
        std: float,
        min_clip: float,
        max_clip: float,
        image_key: str = "image",
    ) -> None:
        """Constructor"""
        super().__init__(keys=[image_key])
        self.image_key = image_key
        self.mean = mean
        self.std = np.max([std, 1e-8])
        self.min_clip = min_clip
        self.max_clip = max_clip

    def __call__(self, data: dict) -> dict:
        """
        Apply the transform to the "image" tensor in the data dictionary.

        Returns
        -------

        data: dict
            Updated dictionary with normalized "image" tensor.
        """

        # Work on a copy of the input dictionary data
        d = dict(data)

        d[self.image_key] = torch.clip(
            d[self.image_key], self.min_clip, self.max_clip
        )
        d[self.image_key] = (d[self.image_key] - self.mean) / self.std
        return d
Ancestors
- monai.transforms.transform.MapTransform
- monai.transforms.transform.Transform
- abc.ABC
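A minimal dictionary-based sketch, assuming samples are dictionaries with "image" and "label" tensors; the shapes and statistics are made up for illustration:

import torch

from qute.transforms.norm import ClippedZNormalized

transform = ClippedZNormalized(
    mean=120.0, std=35.0, min_clip=0.0, max_clip=255.0, image_key="image"
)

sample = {
    "image": 255.0 * torch.rand(1, 256, 256),
    "label": torch.zeros(1, 256, 256, dtype=torch.int64),
}
out = transform(sample)  # only out["image"] is clipped and standardized; "label" is untouched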
class MinMaxNormalize (min_intensity: float = 0.0, max_intensity: float = 65535.0, in_place: bool = True)
Normalize a tensor to [0, 1] using given min and max absolute intensities.
Constructor
Parameters
min_intensity : float
    Minimum intensity to normalize against (optional, default = 0.0).
max_intensity : float
    Maximum intensity to normalize against (optional, default = 65535.0).
in_place : bool
    Set to False to work on a copy of the input (optional, default = True).
Source code

class MinMaxNormalize(Transform):
    """Normalize a tensor to [0, 1] using given min and max absolute intensities."""

    def __init__(
        self,
        min_intensity: float = 0.0,
        max_intensity: float = 65535.0,
        in_place: bool = True,
    ) -> None:
        """Constructor

        Parameters
        ----------

        min_intensity: float
            Minimum intensity to normalize against (optional, default = 0.0).

        max_intensity: float
            Maximum intensity to normalize against (optional, default = 65535.0).

        in_place: bool
            Set to False to work on a copy of the input (optional, default = True).
        """
        super().__init__()
        self.min_intensity = min_intensity
        self.max_intensity = max_intensity
        self.range_intensity = self.max_intensity - self.min_intensity
        self.in_place = in_place

    def __call__(self, data: np.ndarray) -> np.ndarray:
        """
        Apply the transform to the passed tensor or array.

        Returns
        -------

        result: tensor
            Normalized tensor.
        """
        if not self.in_place:
            if isinstance(data, torch.Tensor):
                data = data.clone()
            elif isinstance(data, np.ndarray):
                data = data.copy()
            else:
                raise TypeError(
                    "Unsupported data type. Data should be a PyTorch Tensor or a NumPy array."
                )
        return (data - self.min_intensity) / self.range_intensity
Ancestors
- monai.transforms.transform.Transform
- abc.ABC
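A minimal sketch mapping a simulated 16-bit image onto [0, 1]; the shape and intensity range are assumptions for illustration:

import torch

from qute.transforms.norm import MinMaxNormalize

transform = MinMaxNormalize(min_intensity=0.0, max_intensity=65535.0, in_place=False)

img = torch.randint(0, 65536, (1, 512, 512)).float()  # simulated 16-bit intensities
norm = transform(img)  # values now lie in [0, 1]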
class MinMaxNormalized (keys: tuple[str, ...] = ('image', 'label'), min_intensity: float = 0.0, max_intensity: float = 65535.0)
Normalize the "image" tensor to [0, 1] using given min and max absolute intensities from the data dictionary.
Constructor
Parameters
keys
:tuple[str]
- Keys for the data dictionary.
min_intensity
:float
- Minimum intensity to normalize against (optional, default = 0.0).
max_intensity
:float
- Maximum intensity to normalize against (optional, default = 65535.0).
Source code

class MinMaxNormalized(MapTransform):
    """Normalize the "image" tensor to [0, 1] using given min and max absolute intensities from the data dictionary."""

    def __init__(
        self,
        keys: tuple[str, ...] = ("image", "label"),
        min_intensity: float = 0.0,
        max_intensity: float = 65535.0,
    ) -> None:
        """Constructor

        Parameters
        ----------

        keys: tuple[str]
            Keys for the data dictionary.

        min_intensity: float
            Minimum intensity to normalize against (optional, default = 0.0).

        max_intensity: float
            Maximum intensity to normalize against (optional, default = 65535.0).
        """
        super().__init__(keys=keys)
        self.keys = keys
        self.min_intensity = min_intensity
        self.max_intensity = max_intensity
        self.range_intensity = self.max_intensity - self.min_intensity

    def __call__(self, data: dict) -> dict:
        """
        Apply the transform to the tensors for all specified keys in the data dictionary.

        Returns
        -------

        data: dict
            Updated dictionary with normalized tensors.
        """

        # Work on a copy of the input dictionary data
        d = dict(data)

        # Process the images
        for key in self.keys:
            d[key] = (d[key] - self.min_intensity) / self.range_intensity
        return d
Ancestors
- monai.transforms.transform.MapTransform
- monai.transforms.transform.Transform
- abc.ABC
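A minimal dictionary-based sketch; restricting keys to ("image",) so the label is left untouched is a choice made here for illustration, not a requirement of the class:

import torch

from qute.transforms.norm import MinMaxNormalized

transform = MinMaxNormalized(keys=("image",), min_intensity=0.0, max_intensity=65535.0)

sample = {
    "image": torch.randint(0, 65536, (1, 512, 512)).float(),
    "label": torch.zeros(1, 512, 512, dtype=torch.int64),
}
out = transform(sample)  # out["image"] is in [0, 1]; out["label"] is unchanged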
class Scale (factor: float = 65535.0, dtype: torch.dtype = torch.int32, in_place: bool = True)
Scale the image by a constant factor and optionally type-cast it.
Constructor
Parameters
factor : float
    Factor by which to scale the images (optional, default = 65535.0).
dtype : torch.dtype
    Data type of the final image (optional, default = torch.int32).
in_place : bool
    Set to False to work on a copy of the input (optional, default = True).
Source code

class Scale(Transform):
    """Scale the image by a constant factor and optionally type-cast it."""

    def __init__(
        self,
        factor: float = 65535.0,
        dtype: torch.dtype = torch.int32,
        in_place: bool = True,
    ) -> None:
        """Constructor

        Parameters
        ----------

        factor: float
            Factor by which to scale the images (optional, default = 65535.0).

        dtype: torch.dtype
            Data type of the final image (optional, default = torch.int32).

        in_place: bool
            Set to False to work on a copy of the input (optional, default = True).
        """
        super().__init__()
        self.factor = factor
        self.dtype = dtype
        self.in_place = in_place

    def __call__(self, data: np.ndarray) -> Union[np.ndarray, torch.Tensor]:
        """
        Apply the transform to the passed tensor or array.

        Returns
        -------

        result: Union[np.ndarray, torch.Tensor]
            Tensor or NumPy array with scaled intensities and type-cast.
        """
        if not self.in_place:
            if isinstance(data, torch.Tensor):
                data = data.clone()
            elif isinstance(data, np.ndarray):
                data = data.copy()
            else:
                raise TypeError(
                    "Unsupported data type. Data should be a PyTorch Tensor or a NumPy array."
                )

        # Process the images
        return (data * self.factor).to(self.dtype)
Ancestors
- monai.transforms.transform.Transform
- abc.ABC
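A minimal sketch that rescales a [0, 1] float tensor back to a 16-bit range and casts it to int32; this inverse use case is assumed here for illustration. Note that the final cast uses Tensor.to(), so passing a PyTorch tensor is the safe choice:

import torch

from qute.transforms.norm import Scale

transform = Scale(factor=65535.0, dtype=torch.int32, in_place=False)

img = torch.rand(1, 512, 512)  # float values in [0, 1]
scaled = transform(img)
print(scaled.dtype)  # torch.int32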
class Scaled (keys: tuple[str, ...] = ('image', 'label'), factor: float = 65535.0, dtype: torch.dtype = torch.int32)
Scale the images in the data dictionary by a constant factor and optionally type-cast them.
Constructor
Parameters
keys : tuple[str, ...]
    Keys for the data dictionary.
factor : float
    Factor by which to scale the images (optional, default = 65535.0).
dtype : torch.dtype
    Data type of the final image (optional, default = torch.int32).
Source code

class Scaled(MapTransform):
    """Scale the images in the data dictionary by a constant factor and optionally type-cast them."""

    def __init__(
        self,
        keys: tuple[str, ...] = ("image", "label"),
        factor: float = 65535.0,
        dtype: torch.dtype = torch.int32,
    ) -> None:
        """Constructor

        Parameters
        ----------

        keys: tuple[str]
            Keys for the data dictionary.

        factor: float
            Factor by which to scale the images (optional, default = 65535.0).

        dtype: torch.dtype
            Data type of the final image (optional, default = torch.int32).
        """
        super().__init__(keys=keys)
        self.keys = keys
        self.factor = factor
        self.dtype = dtype

    def __call__(self, data: dict) -> dict:
        """
        Apply the transform to the tensors in the data dictionary.

        Returns
        -------

        data: dict
            Updated dictionary with scaled images.
        """

        # Work on a copy of the input dictionary data
        d = dict(data)

        # Process the images
        for key in self.keys:
            d[key] = (d[key] * self.factor).to(self.dtype)
        return d
Ancestors
- monai.transforms.transform.MapTransform
- monai.transforms.transform.Transform
- abc.ABC
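A minimal dictionary-based sketch, again scaling only the "image" entry; the key choice and shapes are illustrative:

import torch

from qute.transforms.norm import Scaled

transform = Scaled(keys=("image",), factor=65535.0, dtype=torch.int32)

sample = {
    "image": torch.rand(1, 512, 512),  # float values in [0, 1]
    "label": torch.zeros(1, 512, 512, dtype=torch.int32),
}
out = transform(sample)  # out["image"] is scaled and cast to torch.int32; "label" is untouched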
class ZNormalize (in_place: bool = True)
Standardize the passed tensor by subtracting the mean and dividing by the standard deviation.
Constructor
Source code

class ZNormalize(Transform):
    """Standardize the passed tensor by subtracting the mean and dividing by the standard deviation."""

    def __init__(self, in_place: bool = True) -> None:
        """Constructor"""
        super().__init__()
        self.in_place = in_place

    def __call__(self, data: torch.Tensor) -> torch.Tensor:
        """
        Apply the transform to the passed tensor.

        Returns
        -------

        data: torch.Tensor
            Normalized tensor.
        """
        if not self.in_place:
            if isinstance(data, torch.Tensor):
                data = data.clone()
            elif isinstance(data, np.ndarray):
                data = data.copy()
            else:
                raise TypeError(
                    "Unsupported data type. Data should be a PyTorch Tensor or a NumPy array."
                )
        return (data - data.mean()) / data.std()
Ancestors
- monai.transforms.transform.Transform
- abc.ABC
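A minimal sketch; the input shape and intensity range are placeholders:

import torch

from qute.transforms.norm import ZNormalize

transform = ZNormalize(in_place=False)

img = 1000.0 * torch.rand(1, 256, 256)
norm = transform(img)  # norm.mean() is ~0 and norm.std() is ~1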
class ZNormalized (keys: tuple[str, ...] = ('image', 'label'))
Standardize the "image" tensor by subtracting the mean and dividing by the standard deviation.
Constructor
Source code

class ZNormalized(MapTransform):
    """Standardize the "image" tensor by subtracting the mean and dividing by the standard deviation."""

    def __init__(self, keys: tuple[str, ...] = ("image", "label")) -> None:
        """Constructor"""
        super().__init__(keys=keys)
        self.keys = keys

    def __call__(self, data: dict) -> dict:
        """
        Apply the transform to the tensors for all specified keys in the data dictionary.

        Returns
        -------

        data: dict
            Updated dictionary with standardized tensors.
        """

        # Work on a copy of the input dictionary data
        d = dict(data)

        for key in self.keys:
            mn = d[key].mean()
            sd = d[key].std()
            d[key] = (d[key] - mn) / sd
        return d
Ancestors
- monai.transforms.transform.MapTransform
- monai.transforms.transform.Transform
- abc.ABC
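A minimal sketch combining ZNormalized with monai.transforms.Compose in a dictionary pipeline; the key selection and sample contents are assumptions for illustration:

import torch
from monai.transforms import Compose

from qute.transforms.norm import ZNormalized

pipeline = Compose([ZNormalized(keys=("image",))])

sample = {
    "image": 255.0 * torch.rand(1, 256, 256),
    "label": torch.zeros(1, 256, 256, dtype=torch.int64),
}
out = pipeline(sample)  # out["image"] is standardized per tensor; out["label"] is unchanged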