diff --git a/aydin/nn/util/data_util.py b/aydin/nn/util/data_util.py
index 261c2986..27bf3903 100644
--- a/aydin/nn/util/data_util.py
+++ b/aydin/nn/util/data_util.py
@@ -11,6 +11,11 @@ def random_sample_patches(
     which we call patch. Also sorts the patches, and makes sure only patches
     with higher entropy in the intensity histogram are selected.
 
+    To be able to work with any adoption_rate between 0 and 1, we accordingly
+    generate more patches per image during patch generation. After sorting, we
+    are able to apply the adoption rate to the total number of patches we generated
+    for each image.
+
     Parameters
     ----------
     image : numpy.ArrayLike
@@ -21,6 +26,7 @@
 
     Returns
     -------
+    List of Tuples of Slicing Objects
 
     """
     list_of_slice_objects = []
diff --git a/aydin/restoration/denoise/noise2selfcnn.py b/aydin/restoration/denoise/noise2selfcnn.py
index d00aeb72..3ba0b68e 100644
--- a/aydin/restoration/denoise/noise2selfcnn.py
+++ b/aydin/restoration/denoise/noise2selfcnn.py
@@ -184,7 +184,7 @@ def train(self, noisy_image, *, batch_axes=None, chan_axes=None, **kwargs):
         """
         with lsection("Noise2Self train is starting..."):
 
-            if any(chan_axes):
+            if chan_axes:
                 return
 
             self.it = self.get_translator()
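
Note on the first hunk: the added docstring text describes the sampling strategy of random_sample_patches: candidate patches are over-generated in proportion to 1/adoption_rate, ranked by the entropy of their intensity histograms, and only the top fraction is kept. The following standalone sketch is not the aydin implementation; the function name, binning, scoring and corner-sampling details are illustrative assumptions only, meant to show how such a scheme can work:

    import numpy
    from scipy.stats import entropy

    def sample_patches_sketch(image, patch_size, nb_patches_per_image, adoption_rate=0.5):
        # Over-generate candidates so that applying the adoption rate afterwards
        # still yields roughly nb_patches_per_image patches.
        nb_candidates = int(numpy.ceil(nb_patches_per_image / adoption_rate))

        candidates = []
        for _ in range(nb_candidates):
            # Draw a random corner and build a tuple of slice objects for the patch.
            corner = tuple(
                numpy.random.randint(0, s - patch_size + 1) for s in image.shape
            )
            slices = tuple(slice(c, c + patch_size) for c in corner)

            # Score the patch by the entropy of its intensity histogram.
            counts, _ = numpy.histogram(image[slices], bins=256)
            score = entropy(counts)

            candidates.append((score, slices))

        # Sort by entropy and keep only the fraction given by the adoption rate.
        candidates.sort(key=lambda t: t[0], reverse=True)
        nb_kept = int(len(candidates) * adoption_rate)
        return [slices for _, slices in candidates[:nb_kept]]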
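
Note on the second hunk: replacing if any(chan_axes): with if chan_axes: changes how the early return reacts to the default chan_axes=None in the train() signature. any(None) raises TypeError ('NoneType' object is not iterable), while a plain truthiness check treats both None and an empty sequence as "no channel axes specified" and only returns early when channel axes are actually given. A minimal illustration, assuming chan_axes is either None or a list:

    for chan_axes in (None, [], [0]):
        try:
            print(chan_axes, "any:", any(chan_axes))
        except TypeError as error:
            print(chan_axes, "any raised:", error)      # None is not iterable
        print(chan_axes, "truthy:", bool(chan_axes))     # None and [] are both falsy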