Commit 25648ed

Author: Donglai Wei
Message: add reject_sample_num_trial
Parent: e2d4d81

File tree: 3 files changed (+37, -10 lines)

connectomics/config/defaults.py

Lines changed: 1 addition & 0 deletions
@@ -278,6 +278,7 @@
 _C.DATASET.REJECT_SAMPLING.SIZE_THRES = -1
 _C.DATASET.REJECT_SAMPLING.DIVERSITY = -1
 _C.DATASET.REJECT_SAMPLING.P = 0.95
+_C.DATASET.REJECT_SAMPLING.NUM_TRIAL = 50
 
 # Normalize model inputs (the images are assumed to be gray-scale).
 _C.DATASET.MEAN = 0.5
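
Judging by its name, the new NUM_TRIAL option caps how many candidate crops the rejection-sampling stage may draw before it gives up and accepts a fallback. A minimal sketch of that pattern; sample_crop and is_acceptable are hypothetical stand-ins, not functions from this repository:

import numpy as np

def reject_sample(volume, sample_crop, is_acceptable, num_trial=50):
    # Draw random crops until one passes the acceptance test; after
    # num_trial rejections, return the last draw (a hypothetical
    # fallback) so the data loader never blocks on an unlucky volume.
    rng = np.random.default_rng()
    crop = None
    for _ in range(num_trial):
        crop = sample_crop(volume, rng)
        if is_acceptable(crop):
            break
    return crop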

connectomics/data/utils/data_crop.py

Lines changed: 36 additions & 9 deletions
@@ -1,5 +1,7 @@
 import numpy as np
 from scipy.ndimage import convolve
+import torch
+from torch.nn.functional import conv2d, conv3d
 
 ####################################################################
 ## Process image stacks.
@@ -18,27 +20,52 @@ def crop_volume(data, sz, st=(0, 0, 0)):
     else: # crop spatial dimensions
         return data[:, st[0]:st[0]+sz[0], st[1]:st[1]+sz[1], st[2]:st[2]+sz[2]]
 
-def get_valid_pos(mask, vol_sz, valid_ratio):
-    mask_sum = convolve(mask, np.ones(vol_sz), mode='constant', cval=0)
+def get_valid_pos_torch(mask, vol_sz, valid_ratio):
+    # torch version
+    # bug: out of memory
     valid_thres = valid_ratio * np.prod(vol_sz)
     data_sz = mask.shape
-    pad_sz_pre = (np.array(vol_sz) - 1) // 2
-    pad_sz_post = data_sz - (vol_sz - pad_sz_pre - 1)
     if len(vol_sz) == 3:
-        mask_sum = mask_sum[pad_sz_pre[0]:pad_sz_post[0], \
-                            pad_sz_pre[1]:pad_sz_post[1], \
-                            pad_sz_pre[2]:pad_sz_post[2]] >= valid_thres
+        mask_sum = conv3d(torch.from_numpy(mask[None,None].astype(int)), torch.ones(tuple(vol_sz))[None,None], padding='valid')[0,0].numpy() >= valid_thres
         zz, yy, xx = np.meshgrid(np.arange(mask_sum.shape[0]), \
                                  np.arange(mask_sum.shape[1]), \
                                  np.arange(mask_sum.shape[2]))
         valid_pos = np.stack([zz.T[mask_sum], \
                               yy.T[mask_sum], \
                               xx.T[mask_sum]], axis=1)
     else:
-        mask_sum = mask_sum[pad_sz_pre[0]:pad_sz_post[0], \
-                            pad_sz_pre[1]:pad_sz_post[1]] >= valid_thres
+        mask_sum = conv2d(torch.from_numpy(mask[None,None].astype(int)), torch.ones(tuple(vol_sz))[None,None], padding='valid')[0,0].numpy() >= valid_thres
         yy, xx = np.meshgrid(np.arange(mask_sum.shape[0]), \
                              np.arange(mask_sum.shape[1]))
         valid_pos = np.stack([yy.T[mask_sum], \
                               xx.T[mask_sum]], axis=1)
     return valid_pos
+
+def get_valid_pos(mask, vol_sz, valid_ratio):
+    # scipy version
+    valid_thres = valid_ratio * np.prod(vol_sz)
+    data_sz = mask.shape
+    mask_sum = convolve(mask.astype(int), np.ones(vol_sz), mode='constant', cval=0)
+    pad_sz_pre = (np.array(vol_sz) - 1) // 2
+    pad_sz_post = data_sz - (vol_sz - pad_sz_pre - 1)
+    valid_pos = np.zeros([0,3])
+    if len(vol_sz) == 3:
+        mask_sum = mask_sum[pad_sz_pre[0]:pad_sz_post[0], \
+                            pad_sz_pre[1]:pad_sz_post[1], \
+                            pad_sz_pre[2]:pad_sz_post[2]] >= valid_thres
+        if mask_sum.max() > 0:
+            zz, yy, xx = np.meshgrid(np.arange(mask_sum.shape[0]), \
+                                     np.arange(mask_sum.shape[1]), \
+                                     np.arange(mask_sum.shape[2]))
+            valid_pos = np.stack([zz.transpose([1,0,2])[mask_sum], \
+                                  yy.transpose([1,0,2])[mask_sum], \
+                                  xx.transpose([1,0,2])[mask_sum]], axis=1)
+    else:
+        mask_sum = mask_sum[pad_sz_pre[0]:pad_sz_post[0], \
+                            pad_sz_pre[1]:pad_sz_post[1]] >= valid_thres
+        if mask_sum.max() > 0:
+            yy, xx = np.meshgrid(np.arange(mask_sum.shape[0]), \
+                                 np.arange(mask_sum.shape[1]))
+            valid_pos = np.stack([yy.T[mask_sum], \
+                                  xx.T[mask_sum]], axis=1)
+    return valid_pos
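
In the retained scipy version, get_valid_pos box-sums the mask with a vol_sz kernel, trims the border where a full crop would no longer fit, and returns the start coordinates of every crop whose foreground count reaches valid_ratio * prod(vol_sz); the valid_pos = np.zeros([0,3]) initializer and the new mask_sum.max() > 0 guards make the no-valid-position case explicit by returning an empty array. A toy 2-D sanity check, assuming the function is importable from connectomics.data.utils.data_crop (sizes and threshold are illustrative only):

import numpy as np
from connectomics.data.utils.data_crop import get_valid_pos

# An 8x8 mask with a 4x4 foreground square in the middle.
mask = np.zeros((8, 8), dtype=int)
mask[2:6, 2:6] = 1

# Start positions of all 3x3 crops that are at least 50% foreground.
pos = get_valid_pos(mask, (3, 3), 0.5)
print(pos)  # (N, 2) array of (y, x) crop origins

One detail: the transpose([1,0,2]) calls in the 3-D branch compensate for np.meshgrid's default indexing='xy', which swaps the first two axes of its outputs; indexing='ij' would avoid the transposes entirely. The plain .T still used in the 3-D branch of get_valid_pos_torch reverses all three axes instead, which may be why the scipy rewrite switched to the explicit transpose.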

setup.py

Lines changed: 0 additions & 1 deletion
@@ -15,7 +15,6 @@
 'h5py>=2.10.0',
 'gputil>=1.4.0',
 'imageio>=2.9.0',
-'tensorflow>=2.2.0',
 'tensorboard>=2.2.2',
 'einops>=0.3.0',
 'tqdm>=4.58.0',
