Commit

ROI tools
sronilsson committed May 3, 2024
1 parent 049ad5f commit 72951b5
Showing 18 changed files with 810 additions and 1,062 deletions.
setup.py (2 changes: 1 addition & 1 deletion)
@@ -17,7 +17,7 @@

setuptools.setup(
name="Simba-UW-tf-dev",
version="1.91.3",
version="1.91.5",
author="Simon Nilsson, Jia Jie Choong, Sophia Hwang",
author_email="[email protected]",
description="Toolkit for computer classification of behaviors in experimental animals",
simba/SimBA.py (4 changes: 1 addition & 3 deletions)
@@ -1953,9 +1953,7 @@ def __init__(self):
wrap="none",
borderwidth=0,
)
- self.txt.insert(
-     INSERT, Defaults.WELCOME_MSG.value + emojis["relaxed"] + "\n" * 2
- )
+ self.txt.insert(INSERT, Defaults.WELCOME_MSG.value + emojis["relaxed"] + "\n" * 2)
self.txt.tag_add(TagNames.GREETING.value, "1.0", "3.25")
y_sb.pack(side=RIGHT, fill=Y)
self.txt.pack(expand=True, fill="both")
simba/mixins/geometry_mixin.py (20 changes: 9 additions & 11 deletions)
@@ -3163,8 +3163,7 @@ def bucket_img_into_grid_hexagon(
return polygons, round((v_hex_cnt[0] / h_hex_cnt[0]), 3)

@staticmethod
- def _cumsum_coord_geometries_helper(
-     data: np.ndarray, geometries: Dict[Tuple[int, int], Polygon], verbose: bool
+ def _cumsum_coord_geometries_helper(data: np.ndarray, geometries: Dict[Tuple[int, int], Polygon], verbose: bool
):
data_point = Point(data[1:])
if verbose:
@@ -3174,14 +3173,13 @@ def _cumsum_coord_geometries_helper(
return (int(data[0]), k[0], k[1])
return (int(data[0]), -1, -1)

- def cumsum_coord_geometries(
-     self,
-     data: np.ndarray,
-     geometries: Dict[Tuple[int, int], Polygon],
-     fps: Optional[int] = None,
-     core_cnt: Optional[int] = -1,
-     verbose: Optional[bool] = True,
- ):
+ def cumsum_coord_geometries(self,
+                             data: np.ndarray,
+                             geometries: Dict[Tuple[int, int], Polygon],
+                             fps: Optional[int] = None,
+                             core_cnt: Optional[int] = -1,
+                             verbose: Optional[bool] = True):

"""
Compute the cumulative time a body-part has spent inside a grid of geometries using multiprocessing.
@@ -3220,7 +3218,7 @@ def cumsum_coord_geometries(
data = np.hstack((frm_id, data))
img_arr = np.zeros((data.shape[0], h + 1, w + 1))
with multiprocessing.Pool(
- core_cnt, maxtasksperchild=Defaults.LARGE_MAX_TASK_PER_CHILD.value
+ core_cnt, maxtasksperchild=Defaults.MAXIMUM_MAX_TASK_PER_CHILD.value
) as pool:
constants = functools.partial(
self._cumsum_coord_geometries_helper,
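
A minimal usage sketch of the reformatted cumsum_coord_geometries signature above. The 2x2 grid, body-part coordinates, and fps are made-up toy values, and the layout of the returned array is an assumption not confirmed by this diff:

# Hypothetical call to GeometryMixin.cumsum_coord_geometries(); grid, coordinates, and
# fps are toy values. The call is guarded for spawn-based multiprocessing.
import numpy as np
from shapely.geometry import Polygon
from simba.mixins.geometry_mixin import GeometryMixin

if __name__ == "__main__":
    # 2x2 grid of 50x50-pixel square cells keyed by (row, col), matching the
    # Dict[Tuple[int, int], Polygon] type in the signature above.
    geometries = {}
    for r in range(2):
        for c in range(2):
            x0, y0 = c * 50, r * 50
            geometries[(r, c)] = Polygon([(x0, y0), (x0 + 50, y0), (x0 + 50, y0 + 50), (x0, y0 + 50)])
    data = np.random.randint(0, 100, size=(300, 2))  # one (x, y) body-part location per frame
    grid_time = GeometryMixin().cumsum_coord_geometries(data=data, geometries=geometries, fps=30, core_cnt=2, verbose=False)
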
simba/mixins/image_mixin.py (50 changes: 19 additions & 31 deletions)
@@ -51,12 +51,12 @@ def __init__(self):
pass

@staticmethod
- def brightness_intensity(
-     imgs: List[np.ndarray], ignore_black: Optional[bool] = True
- ) -> List[float]:
+ def brightness_intensity(imgs: List[np.ndarray], ignore_black: Optional[bool] = True) -> List[float]:
"""
Compute the average brightness intensity of each image in a list.
For example, (i) create a list of images containing a light-cue ROI, (ii) compute the brightness of each image, (iii) perform kmeans on the brightness values, and find the frames where the light cue is on vs. off (sketched below).
:param List[np.ndarray] imgs: List of images, as arrays, in which to calculate the average brightness intensity.
:param Optional[bool] ignore_black: If True, ignores black pixels. If the images are non-rectangular geometric shapes sliced by ``slice_shapes_in_img``, pixels that don't belong to the shape have been masked in black.
:returns List[float]: List of floats of size len(imgs) with brightness intensities.
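
The light-cue workflow described in the docstring above can be sketched as follows. The ROI frames are random stand-ins, and the use of scikit-learn's KMeans is an assumption (the docstring does not name a kmeans implementation):

# Sketch: per-image brightness via ImageMixin.brightness_intensity(), then 2-cluster
# kmeans to split "cue on" from "cue off" frames. The frame data and the rule
# "brighter cluster = cue on" are illustrative assumptions.
import numpy as np
from sklearn.cluster import KMeans
from simba.mixins.image_mixin import ImageMixin

roi_imgs = [np.random.randint(0, 255, (50, 50), dtype=np.uint8) for _ in range(300)]  # stand-in ROI crops
brightness = np.array(ImageMixin.brightness_intensity(imgs=roi_imgs, ignore_black=True)).reshape(-1, 1)
labels = KMeans(n_clusters=2, n_init=10, random_state=0).fit_predict(brightness)
cue_on_cluster = int(np.argmax([brightness[labels == k].mean() for k in (0, 1)]))
cue_on_frames = np.where(labels == cue_on_cluster)[0]  # frame indices where the light cue is on
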
@@ -90,17 +90,8 @@ def brightness_intensity(
@staticmethod
def gaussian_blur(img: np.ndarray, kernel_size: Optional[Tuple] = (9, 9)):
check_if_valid_img(data=img, source=ImageMixin.gaussian_blur.__name__)
- check_instance(
-     source=ImageMixin.gaussian_blur.__name__,
-     instance=kernel_size,
-     accepted_types=(tuple,),
- )
- check_valid_lst(
-     data=list(kernel_size),
-     source=ImageMixin.gaussian_blur.__name__,
-     valid_dtypes=(int,),
-     exact_len=2,
- )
+ check_instance(source=ImageMixin.gaussian_blur.__name__, instance=kernel_size, accepted_types=(tuple,))
+ check_valid_lst(data=list(kernel_size), source=ImageMixin.gaussian_blur.__name__, valid_dtypes=(int,), exact_len=2,)
return cv2.GaussianBlur(img, kernel_size, 0)

@staticmethod
@@ -171,13 +162,12 @@ def get_histocomparison(
)

@staticmethod
- def get_contourmatch(
-     img_1: np.ndarray,
-     img_2: np.ndarray,
-     mode: Optional[Literal["all", "exterior"]] = "all",
-     method: Optional[Literal["simple", "none", "l2", "kcos"]] = "simple",
-     canny: Optional[bool] = True,
- ) -> float:
+ def get_contourmatch(img_1: np.ndarray,
+                      img_2: np.ndarray,
+                      mode: Optional[Literal["all", "exterior"]] = "all",
+                      method: Optional[Literal["simple", "none", "l2", "kcos"]] = "simple",
+                      canny: Optional[bool] = True) -> float:

"""
Calculate contour similarity between two images.
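
A short example of calling the compacted get_contourmatch signature above; the image paths are placeholders and the keyword values simply repeat the defaults shown in the diff:

# Hypothetical contour-similarity check between two video frames.
import cv2
from simba.mixins.image_mixin import ImageMixin

img_1 = cv2.imread("frame_0.png")  # placeholder paths
img_2 = cv2.imread("frame_1.png")
similarity = ImageMixin.get_contourmatch(img_1=img_1, img_2=img_2, mode="all", method="simple", canny=True)
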
@@ -396,13 +386,13 @@ def find_contours(
interior_contours.append(cnts[i])

@staticmethod
- def orb_matching_similarity_(
-     img_1: np.ndarray,
-     img_2: np.ndarray,
-     method: Literal["knn", "match", "radius"] = "knn",
-     mask: Optional[np.ndarray] = None,
-     threshold: Optional[int] = 0.75,
- ) -> int:
+ def orb_matching_similarity_(img_1: np.ndarray,
+                              img_2: np.ndarray,
+                              method: Literal["knn", "match", "radius"] = "knn",
+                              mask: Optional[np.ndarray] = None,
+                              threshold: Optional[int] = 0.75) -> int:


"""Perform ORB feature matching between two sets of images.
>>> img_1 = cv2.imread('/Users/simon/Desktop/envs/troubleshooting/khan/project_folder/videos/stitched_frames/0.png').astype(np.uint8)
@@ -416,9 +406,7 @@ def orb_matching_similarity_(
sliced_matches = None
if method == "knn":
matches = cv2.BFMatcher().knnMatch(des1, des2, k=2)
- sliced_matches = [
-     m for m, n in matches if m.distance < threshold * n.distance
- ]
+ sliced_matches = [m for m, n in matches if m.distance < threshold * n.distance]
if method == "match":
matches = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True).match(des1, des2)
sliced_matches = [match for match in matches if match.distance <= threshold]
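
For reference, a hedged usage sketch of orb_matching_similarity_ with the "knn" mode, whose ratio test is shown above (m.distance < threshold * n.distance); the image paths are placeholders and treating the int return value as a match count is an assumption:

# Hypothetical ORB matching between two frames using the knn (ratio-test) branch.
import cv2
from simba.mixins.image_mixin import ImageMixin

img_1 = cv2.imread("frame_100.png", cv2.IMREAD_GRAYSCALE)  # placeholder paths
img_2 = cv2.imread("frame_101.png", cv2.IMREAD_GRAYSCALE)
matches = ImageMixin.orb_matching_similarity_(img_1=img_1, img_2=img_2, method="knn", mask=None, threshold=0.75)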