
Even faster cropping #152

Merged · 37 commits · Apr 26, 2022
1a83f09 implemented better way to specify blindspots using a shorthand notat… (royerloic, Apr 14, 2022)
318bb58 Merge branch 'master' into minor_fixes (Apr 14, 2022)
b4cd074 fixed inconsistencies of blindspot treatment between FGR/CNN and Cla… (Apr 15, 2022)
6ff11d7 small fix (Apr 15, 2022)
9d42412 I think I finally fixed Butterworth! It was kind of working but was q… (Apr 16, 2022)
0bf0720 small test fix (Apr 16, 2022)
95e05f5 small fix (royerloic, Apr 16, 2022)
e360fb3 fixed some issues with hcr and hela demos, tried GPU inference, not w… (royerloic, Apr 16, 2022)
bac393f added dataset for OpenCell usecase (royerloic, Apr 16, 2022)
1810f13 much faster and more accurate cropping (royerloic, Apr 17, 2022)
6d87ad6 new lowpass features for FGR enable new record on newyork test image (royerloic, Apr 18, 2022)
223f701 Merge branch 'master' into minor_fixes (royerloic, Apr 18, 2022)
dc381c6 faster (and better?) separable low pass features for FGR (Apr 19, 2022)
b54a65c faster correlation features (convolution != correlation) using numba (Apr 19, 2022)
13c2548 black and flake8 fixes (AhmetCanSolak, Apr 20, 2022)
9487d56 forgot correlation code! (Apr 20, 2022)
0a24b94 black and flake8 fixes (AhmetCanSolak, Apr 20, 2022)
549dfef forgot correlation code! (Apr 21, 2022)
3e62e64 black fixes (AhmetCanSolak, Apr 21, 2022)
3bf6798 disabled snr_estimate broken asserts for now (AhmetCanSolak, Apr 21, 2022)
30686a8 flake8 fixes (AhmetCanSolak, Apr 21, 2022)
59bce69 Merge branch 'master' into minor_fixes (royerloic, Apr 21, 2022)
25c8660 Merge branch 'master' into final_fixes_before_new_release (royerloic, Apr 21, 2022)
c7ffabb fixed demo mains, fixed big bug in calibration, and more small fixes (royerloic, Apr 21, 2022)
56e33fb camera is back! fixed snr_estimate but still needs improvement (royerloic, Apr 22, 2022)
5dcda47 Merge branch 'master' into final_fixes_before_new_release (Apr 22, 2022)
81acd2c faster cropping (Apr 23, 2022)
05f1c40 fixed issue with dim analysis (Apr 23, 2022)
fbc66c6 Merge branch 'master' into final_fixes_before_new_release (royerloic, Apr 23, 2022)
473ad84 fixing tests (royerloic, Apr 25, 2022)
461f10d small improvement (royerloic, Apr 25, 2022)
58c72ce fix regression caused by over-zealous auto blindspot detection (royerloic, Apr 25, 2022)
d7c282b some tuning (royerloic, Apr 25, 2022)
f5ba2a1 first commit (royerloic, Apr 25, 2022)
cdda199 trying everything to speed up cropping (Apr 26, 2022)
030a529 Much faster cropping on ui. (royerloic, Apr 26, 2022)
c61d0d1 black and flake8 fixes (AhmetCanSolak, Apr 26, 2022)
10 changes: 5 additions & 5 deletions aydin/gui/tabs/qt/training_cropping.py
@@ -2,7 +2,7 @@
 from qtpy.QtWidgets import QCheckBox
 
 from aydin.gui.tabs.qt.base_cropping import BaseCroppingTab
-from aydin.util.crop.rep_crop import representative_crop
+from aydin.util.crop.sf_rep_crop import super_fast_representative_crop
 
 
 class TrainingCroppingTab(BaseCroppingTab):
@@ -52,14 +52,14 @@ def images(self, images):
 
         if len(images) == 1:
             image = images[0][1]
-            response = representative_crop(
+            response = super_fast_representative_crop(
                 image,
-                mode='contrast' if image.size > 5_000_000 else 'sobelmin',
+                mode='contrast',
                 crop_size=2_000_000,
-                search_mode='random' if image.size > 10_000_000 else 'systematic',
+                search_mode='random' if image.size > 500_000_000 else 'systematic',
                 random_search_mode_num_crops=1024,
                 return_slice=True,
-                timeout_in_seconds=1,
+                timeout_in_seconds=1.5,
             )
 
             if type(response) == tuple:
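For context, the call above can also be exercised outside the GUI. The following is a minimal illustrative sketch, not part of the diff; the synthetic image and the printout are assumptions made for the example:

import numpy
from aydin.util.crop.sf_rep_crop import super_fast_representative_crop

# Synthetic 2D image, large enough that an actual crop is extracted:
image = numpy.random.rand(2048, 2048).astype(numpy.float32)

response = super_fast_representative_crop(
    image,
    mode='contrast',
    crop_size=2_000_000,
    search_mode='systematic',  # image.size is well below the 500_000_000 threshold
    return_slice=True,
    timeout_in_seconds=1.5,
)

# With return_slice=True the function returns (crop, slice); the GUI keeps the
# slice so the crop region can be mapped back onto the full-resolution image:
if type(response) == tuple:
    crop, crop_slice = response
    print(crop.shape, crop_slice)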
21 changes: 18 additions & 3 deletions aydin/util/crop/demo/demo_rep_crop.py
@@ -1,9 +1,19 @@
 # flake8: noqa
+import numpy
+from numpy.random import normal
 from skimage.data import camera
 
-from aydin.io.datasets import dots, lizard, pollen, newyork, characters, examples_single
+from aydin.io.datasets import (
+    dots,
+    lizard,
+    pollen,
+    newyork,
+    characters,
+    examples_single,
+    normalise,
+)
 from aydin.util.crop.rep_crop import representative_crop
-from aydin.util.log.log import Log, lsection
+from aydin.util.log.log import Log, lsection, lprint
 
 
 def demo_representative_crop(
@@ -15,6 +25,9 @@ def demo_representative_crop(
     Log.enable_output = True
     Log.set_log_max_depth(5)
 
+    image = normalise(image.astype(numpy.float32))
+    image += 0.1 * normal(size=image.shape, scale=0.1)
+
     def _crop_fun():
         return representative_crop(
             image, crop_size=crop_size, search_mode=search_mode, display_crop=False
@@ -35,7 +48,9 @@ def _crop_fun():
         viewer.add_image(crop, name='crop')
         napari.run()
 
-    assert crop.size <= int(crop_size * 1.25)
+    lprint(f"Crop size requested: {crop_size} obtained: {crop.size}")
+
+    assert crop.size >= int(crop_size * 0.75) and crop.size <= int(crop_size * 1.25)
 
 
 if __name__ == "__main__":
89 changes: 89 additions & 0 deletions aydin/util/crop/demo/demo_sf_rep_crop.py
@@ -0,0 +1,89 @@
# flake8: noqa
import numpy
from numpy.random import normal
from skimage.data import camera

from aydin.io.datasets import (
    dots,
    lizard,
    pollen,
    newyork,
    characters,
    examples_single,
    normalise,
)
from aydin.util.crop.sf_rep_crop import super_fast_representative_crop
from aydin.util.log.log import Log, lsection, lprint


def demo_super_fast_representative_crop(image, crop_size=64000, display: bool = True):
    """
    Demo for super fast representative cropping of an image with synthetic noise
    """
    Log.enable_output = True
    Log.set_log_max_depth(5)

    image = normalise(image.astype(numpy.float32))
    image += 0.1 * normal(size=image.shape, scale=0.1)

    def _crop_fun():
        return super_fast_representative_crop(
            image, crop_size=crop_size, display_crop=False
        )

    # Warmup (numba compilation)
    # _crop_fun()

    with lsection(f"Computing crop for image of shape: {image.shape}"):
        # for _ in range(10):
        crop = _crop_fun()

    if display:
        import napari

        viewer = napari.Viewer()
        viewer.add_image(image, name='image')
        viewer.add_image(crop, name='crop')
        napari.run()

    lprint(f"Crop size requested: {crop_size} obtained: {crop.size}")

    assert crop.size >= int(crop_size * 0.5) and crop.size <= int(crop_size * 2)


if __name__ == "__main__":

    demo_super_fast_representative_crop(
        examples_single.royerlab_hcr.get_array().squeeze(),
        crop_size=1_000_000,
    )

    demo_super_fast_representative_crop(
        examples_single.maitre_mouse.get_array(), crop_size=1_000_000
    )

    demo_super_fast_representative_crop(
        examples_single.leonetti_arhgap21.get_array(), crop_size=1_000_000
    )

    demo_super_fast_representative_crop(
        examples_single.royerlab_hcr.get_array().squeeze()[:, 0, ...],
        crop_size=1_000_000,
    )

    demo_super_fast_representative_crop(
        examples_single.royerlab_hcr.get_array().squeeze()[:, 1, ...],
        crop_size=1_000_000,
    )

    demo_super_fast_representative_crop(
        examples_single.royerlab_hcr.get_array().squeeze()[:, 2, ...],
        crop_size=1_000_000,
    )

    demo_super_fast_representative_crop(newyork())
    demo_super_fast_representative_crop(camera())
    demo_super_fast_representative_crop(characters())
    demo_super_fast_representative_crop(pollen())
    demo_super_fast_representative_crop(lizard())
    demo_super_fast_representative_crop(dots())
36 changes: 24 additions & 12 deletions aydin/util/crop/rep_crop.py
@@ -22,6 +22,7 @@ def representative_crop(
     equal_sides: bool = False,
     favour_odd_lengths: bool = False,
     search_mode: str = 'random',
+    granularity_factor: int = 4,
     random_search_mode_num_crops: int = 1512,
     min_num_crops: int = 512,
     timeout_in_seconds: float = 2,
@@ -66,6 +67,9 @@
         random mode we pick random crops, in systematic mode we check every
         possible strided crop.
 
+    granularity_factor: int
+        Granularity of search. Higher values correspond to more overlap between candidate crops.
+
     random_search_mode_num_crops: int
         Number of crops to check in 'random' search mode.
 
@@ -111,9 +115,9 @@
 
     with lsection("Cast and normalise image..."):
         # Cast, if needed:
-        image = image.astype(numpy.float32)
+        image = image.astype(numpy.float32, copy=False)
         # Normalise:
-        image = _normalise(image)
+        # image = _normalise(image)
 
     # Apply filter:
     with lsection(f"Apply cropping filter to image of shape: {image.shape}"):
@@ -163,7 +167,10 @@
 
     if ratio >= 1:
         # If the image is small enough no point in getting a crop!
-        return image
+        if return_slice:
+            return image, (slice(None),) * image.ndim
+        else:
+            return image
 
     # cropped shape:
     cropped_shape = tuple(
@@ -210,10 +217,10 @@
         # Instead of searching for all possible crops, we take into
         # account the size of the crops to define a 'granularity' (
        # stride) of the translations used for search:
-        granularity_factor = 4
 
         granularity = tuple(cs // granularity_factor for cs in cropped_shape)
 
-        if search_mode == 'random' or image.size > 1e6:
+        if search_mode == 'random':
 
             # We make sure that the number of crops is not too large given
             # the relative size of the crop versus whole image:
@@ -272,11 +279,16 @@ def _crop_slice(translation, cropped_shape, downscale: int = 1):
                 for r, cs in zip(translation_range, cropped_shape)
             )
 
-            for i, i in enumerate(numpy.ndindex(translation_indices)):
+            for i, index in enumerate(numpy.ndindex(translation_indices)):
 
+                # print(
+                #     f"i={i}, index={index}, translation_indices={translation_indices}"
+                # )
+
                 # translation:
                 translation = tuple(
-                    int(i * cs / granularity_factor) for i, cs in zip(i, cropped_shape)
+                    int(j * cs / granularity_factor)
+                    for j, cs in zip(index, cropped_shape)
                 )
 
                 # slice object for cropping:
@@ -309,10 +321,10 @@ def _crop_slice(translation, cropped_shape, downscale: int = 1):
 
         import napari
 
-        with napari.gui_qt():
-            viewer = napari.Viewer()
-            viewer.add_image(image, name='image')
-            viewer.add_image(best_crop, name='best_crop')
+        viewer = napari.Viewer()
+        viewer.add_image(image.squeeze(), name='image')
+        viewer.add_image(best_crop.squeeze(), name='best_crop')
+        napari.run()
 
     # print(_fast_std.signatures)
     # for sig in _fast_std.signatures:
@@ -391,7 +403,7 @@ def _rescale(x, min_value, max_value):
 
 
 @jit(nopython=True, parallel=True, fastmath=True)
-def _fast_std(image: ArrayLike, workers=32, decimation=1):
+def _fast_std(image: ArrayLike, workers=16, decimation=1):
 
     array = image.ravel()
     length = array.size
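To make the new granularity_factor parameter concrete: the search stride between candidate crops is the crop side divided by granularity_factor (the granularity = cs // granularity_factor line above), so higher factors mean finer strides and more overlap. A small illustrative snippet, not from the PR:

# Illustrative only: how granularity_factor sets the search stride per axis.
cropped_shape = (512, 512)

for granularity_factor in (2, 4, 8):
    granularity = tuple(cs // granularity_factor for cs in cropped_shape)
    overlap = tuple(1 - g / cs for g, cs in zip(granularity, cropped_shape))
    print(f"factor={granularity_factor} stride={granularity} overlap={overlap}")

# factor=2 stride=(256, 256) overlap=(0.5, 0.5)
# factor=4 stride=(128, 128) overlap=(0.75, 0.75)   (the default)
# factor=8 stride=(64, 64) overlap=(0.875, 0.875)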
103 changes: 103 additions & 0 deletions aydin/util/crop/sf_rep_crop.py
@@ -0,0 +1,103 @@
from memoization.memoization import cached
from numpy.typing import ArrayLike
from scipy.ndimage import zoom

from aydin.util.crop.rep_crop import representative_crop
from aydin.util.log.log import lprint, lsection


@cached(ttl=10, max_size=5)
def super_fast_representative_crop(
    image: ArrayLike,
    crop_size: int,
    min_length: int = 8,
    search_mode: str = 'systematic',
    granularity_factor: int = 3,
    return_slice: bool = False,
    min_scaling_factor: int = 2,
    *args,
    **kwargs,
):
    """

    Parameters
    ----------
    Same parameters as 'representative_crop' with the addition of:

    min_scaling_factor: int
        Minimal downscaling factor per axis.


    Returns
    -------
    Most representative crop, and if return_slice is True the actual slice object too.

    """
    with lsection(f"Super fast cropping image of size: {image.shape}"):

        # Compute downscale factor per dimension:
        def _downscale(length):
            return min(max(min_scaling_factor, length // 256), min_length)

        downscale_factor = tuple(
            _downscale(s) if s >= min_length else min_length // 2 for s in image.shape
        )
        lprint(f"Scaling by factors: {downscale_factor}")

        # Compute zoom factor per axis:
        zoom_per_axis = tuple(1.0 / d for d in downscale_factor)

        # Downsample image:
        with lsection(f"Downscaling image of shape: {image.shape}..."):
            image_d = zoom(image, zoom=zoom_per_axis, prefilter=False, order=0)

        # Compute overall zoom factor:
        overall_zoom = image_d.size / image.size

        # Compute the scaled-down crop_size:
        crop_size = int(crop_size * overall_zoom)

        # Delegate cropping to the downscaled image:
        _, slice_ = representative_crop(
            image_d,
            crop_size=crop_size,
            search_mode=search_mode,
            granularity_factor=granularity_factor,
            min_length=min_length,
            return_slice=True,
            *args,
            **kwargs,
        )

        # Normalise slice: replace None bounds with explicit bounds:
        slice_ = tuple(
            slice(
                0 if sl.start is None else sl.start,
                s if sl.stop is None else sl.stop,
                1,
            )
            for sl, s in zip(slice_, image_d.shape)
        )

        # Upscale slice back to full resolution:
        slice_ = tuple(
            slice(sl.start * s, sl.stop * s, 1)
            for sl, s in zip(slice_, downscale_factor)
        )

        # Clip slice to dimensions of image:
        slice_ = tuple(
            slice(max(sl.start, 0), min(sl.stop, s), 1)
            for sl, s in zip(slice_, image.shape)
        )

        # Crop image:
        crop = image[slice_]

        # Return crop, and slice too if requested:
        if return_slice:
            return crop, slice_
        else:
            return crop
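A worked example of the slice round-trip implemented above, with illustrative values (a 1000x1000 image downscaled 4x per axis, not from the PR): the slice found on the downscaled image is normalised, scaled back up, and clipped.

downscale_factor = (4, 4)
image_shape = (1000, 1000)   # full-resolution shape
image_d_shape = (250, 250)   # shape after zoom by 1/4 per axis

# Slice as returned by representative_crop on the downscaled image:
slice_ = (slice(10, 60, None), slice(None, 50, None))

# Normalise: replace None bounds with explicit bounds of the downscaled image:
slice_ = tuple(
    slice(0 if sl.start is None else sl.start, s if sl.stop is None else sl.stop, 1)
    for sl, s in zip(slice_, image_d_shape)
)

# Upscale: multiply bounds by the per-axis downscale factor:
slice_ = tuple(
    slice(sl.start * d, sl.stop * d, 1) for sl, d in zip(slice_, downscale_factor)
)

# Clip to the full-resolution image:
slice_ = tuple(
    slice(max(sl.start, 0), min(sl.stop, s), 1) for sl, s in zip(slice_, image_shape)
)

print(slice_)  # (slice(40, 240, 1), slice(0, 200, 1))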
6 changes: 6 additions & 0 deletions aydin/util/crop/test/test_rep_crop.py
@@ -2,8 +2,14 @@
 
 from aydin.io.datasets import newyork
 from aydin.util.crop.demo.demo_rep_crop import demo_representative_crop
+from aydin.util.crop.demo.demo_sf_rep_crop import demo_super_fast_representative_crop
 
 
 def test_representative_crop():
     newyork_image = newyork()
     demo_representative_crop(newyork_image, display=False)
+
+
+def test_super_fast_representative_crop():
+    newyork_image = newyork()
+    demo_super_fast_representative_crop(newyork_image, display=False)
1 change: 1 addition & 0 deletions setup.cfg
@@ -52,3 +52,4 @@ install_requires =
     keras==2.7.0
     zarr==2.4.0
     imagecodecs==2022.2.22
+    memoization==0.4.0
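The new memoization pin backs the @cached(ttl=10, max_size=5) decorator on super_fast_representative_crop: repeated calls with the same arguments within the TTL are served from cache. A minimal sketch of that behaviour, assuming the memoization package's documented API:

from memoization import cached

calls = []

@cached(ttl=10, max_size=5)
def expensive(x):
    # Body runs only on cache misses:
    calls.append(x)
    return x * x

expensive(3)         # computed
expensive(3)         # served from cache (within the 10 s TTL)
assert calls == [3]  # at most 5 distinct results are kept (max_size)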