Medical AI from Scratch (밑바닥부터 시작하는 의료AI)
루닛 (Lunit)
김준호
https://github.com/taki0112
import csv

def read_csv(filename):
    lines = []
    with open(filename, 'r') as f:
        csvreader = csv.reader(f)
        for line in csvreader:
            lines.append(line)
    lines = lines[1:]  # remove csv header
    annotations_dict = {}
    for i in lines:
        series_uid, x, y, z, diameter = i
        value = {'position': [float(x), float(y), float(z)],
                 'diameter': float(diameter)}
        if series_uid in annotations_dict:
            annotations_dict[series_uid].append(value)
        else:
            annotations_dict[series_uid] = [value]
    return annotations_dict
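For reference, each row of LUNA16's annotations.csv is seriesuid, coordX, coordY, coordZ, diameter_mm, with coordinates in world space (mm). A minimal usage sketch (the path is hypothetical):

annotations = read_csv('/data/LUNA16/annotations.csv')
# {series_uid: [{'position': [x, y, z], 'diameter': d_mm}, ...]}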
import numpy as np
import SimpleITK as sitk

def load_itk_image(filename):
    itkimage = sitk.ReadImage(filename)
    numpyImage = sitk.GetArrayFromImage(itkimage)                   # array in (z, y, x) order
    numpyOrigin = np.array(list(reversed(itkimage.GetOrigin())))    # world origin, reversed to (z, y, x)
    numpySpacing = np.array(list(reversed(itkimage.GetSpacing())))  # voxel spacing in mm, (z, y, x)
    return numpyImage, numpyOrigin, numpySpacing
# Variant: keep (x, y, z) order by transposing the array instead of reversing origin/spacing.
def load_itk_image(filename):
    itkimage = sitk.ReadImage(filename)
    numpyImage = np.transpose(sitk.GetArrayFromImage(itkimage))  # array in (x, y, z) order
    numpyOrigin = np.array(itkimage.GetOrigin())                 # world origin, (x, y, z)
    numpySpacing = np.array(itkimage.GetSpacing())               # voxel spacing in mm, (x, y, z)
    return numpyImage, numpyOrigin, numpySpacing
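Either variant works as long as array, origin, and spacing agree on axis order: SimpleITK returns GetOrigin() and GetSpacing() in (x, y, z) order, while GetArrayFromImage() returns the voxel array as (z, y, x). A quick check on any LUNA16 .mhd file (the path is hypothetical):

image, origin, spacing = load_itk_image('/data/LUNA16/subset0/some_scan.mhd')
print(image.shape, origin, spacing)  # e.g. a (z, y, x) volume such as (133, 512, 512)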
import scipy.ndimage

def resample(image, org_spacing, new_spacing=OUTPUT_SPACING):
    resize_factor = org_spacing / new_spacing
    new_real_shape = image.shape * resize_factor
    new_shape = np.round(new_real_shape)
    real_resize_factor = new_shape / image.shape  # recomputed from the rounded shape
    image = scipy.ndimage.interpolation.zoom(image, real_resize_factor, mode='nearest')
    return image
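A minimal sketch of how the pieces fit so far, assuming spacing comes back from load_itk_image in the same (z, y, x) order as the array (scan path hypothetical):

OUTPUT_SPACING = [1.25, 1.25, 1.25]  # isotropic 1.25 mm target, defined later in the deck
image, origin, spacing = load_itk_image('/data/LUNA16/subset0/some_scan.mhd')
image = resample(image, spacing)     # every scan now has the same physical voxel size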
def normalize_planes(npzarray):
    maxHU = 400.
    minHU = -1000.
    npzarray = (npzarray - minHU) / (maxHU - minHU)
    npzarray[npzarray > 1] = 1.
    npzarray[npzarray < 0] = 0.
    return npzarray
def zero_center(image):
    PIXEL_MEAN = 0.25
    image = image - PIXEL_MEAN
    return image
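normalize_planes linearly maps the HU window [-1000, 400] into [0, 1] and clips everything outside it, so -1000 HU (air) maps to 0.0, 400 HU to 1.0, and 0 HU (water) to (0 + 1000) / 1400 ≈ 0.714; zero_center then subtracts the mean pixel value 0.25 to give roughly zero-centered inputs:

x = zero_center(normalize_planes(image))  # clipped, scaled, zero-centered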
def world_2_voxel(world_coord, origin, spacing):
    # world coordinate (mm) -> voxel index, assuming an axis-aligned scan
    stretched_voxel_coord = np.absolute(world_coord - origin)
    voxel_coord = stretched_voxel_coord / spacing
    return voxel_coord

OUTPUT_SPACING = [1.25, 1.25, 1.25]
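A worked example with made-up numbers: a nodule at world coordinate (-100.0, 67.5, -231.2) mm in a scan whose origin is (-166.1, -171.7, -295.4) mm, after resampling to 1.25 mm spacing:

voxel = world_2_voxel(np.array([-100.0, 67.5, -231.2]),
                      origin=np.array([-166.1, -171.7, -295.4]),
                      spacing=np.array(OUTPUT_SPACING))
# -> [52.88, 191.36, 51.36], truncated to [52, 191, 51] in create_label below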
def create_label(arr_shape, nodules, origin, new_spacing, coord=False):
    # nodules = list of dicts {'position', 'diameter'}; origin comes from load_itk_image
    label = np.zeros(arr_shape, dtype=bool)
    for nodule in nodules:
        worldCoord = nodule['position']
        worldCoord = np.asarray([worldCoord[2], worldCoord[1], worldCoord[0]])  # (x, y, z) -> (z, y, x)
        # new_spacing came from resample
        voxelCoord = world_2_voxel(worldCoord, origin, new_spacing)
        voxelCoord = [int(i) for i in voxelCoord]
        diameter = nodule['diameter']
        diameter = diameter / new_spacing[1]  # mm -> voxels
        label = np.logical_or(label, _create_mask(arr_shape, voxelCoord, diameter))
    return label
def _create_mask(arr_shape, position, diameter):
    # boolean sphere of the given diameter (in voxels), centered at position
    z_dim, y_dim, x_dim = arr_shape
    z_pos, y_pos, x_pos = position
    z, y, x = np.ogrid[-z_pos:z_dim - z_pos,
                       -y_pos:y_dim - y_pos,
                       -x_pos:x_dim - x_pos]
    mask = z**2 + y**2 + x**2 <= int(diameter // 2)**2
    return mask
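A quick sanity check with toy numbers, reproducing the slide's one-axis view where offsets [-2, -1, x, 1, 2] around the center map to [False, True, True, True, False]:

m = _create_mask((5, 5, 5), position=(2, 2, 2), diameter=3.0)
print(m[2, 2])  # [False  True  True  True False] along the x axis through the center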
# pad half a patch on every side so patches centered near the border stay in bounds
offset = patch_size // 2
image = np.pad(image, offset, 'constant', constant_values=np.min(image))
# pad further so sliding-window positions taken at stride 8 still cover the borders
offset = patch_size // 2
stride = 8
move = offset // stride
image = np.pad(image, offset + (stride * move), 'constant', constant_values=np.min(image))
import h5py

with h5py.File(save_path + 'subset' + str(i) + '.h5', 'w') as hf:
    hf.create_dataset('nodule', data=nodule[:], compression='lzf')
    hf.create_dataset('label_nodule', data=nodule_label[:], compression='lzf')
    hf.create_dataset('non_nodule', data=non_nodule[:], compression='lzf')
    hf.create_dataset('label_non_nodule', data=non_nodule_label[:], compression='lzf')
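Reading the patches back is symmetric; a minimal sketch using the dataset names saved above:

with h5py.File(save_path + 'subset0.h5', 'r') as hf:
    nodules = hf['nodule'][:16]       # first 16 nodule patches
    labels = hf['label_nodule'][:16]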
import tftables
import tensorflow as tf

def input_transform(tbl_batch):
    labels = tbl_batch['nodule_label']
    data = tbl_batch['nodule']
    return labels, data

loader = tftables.load_dataset(filename='/data/LUNA16/subset1.h5',
                               dataset_path='/internal/h5/path',
                               input_transform=input_transform,
                               queue_size=256,
                               batch_size=16)
truth_batch, data_batch = loader.dequeue()
result = CASED(truth_batch, data_batch)

with tf.Session() as sess:
    with loader.begin(sess):
        for _ in range(num_iterations):
            sess.run(result)
x = scipy.ndimage.zoom(input=x, zoom=1/8, order=1)  # order=1 is linear interpolation (order=0 would be nearest)
x = np.where(x > 0, 1.0, 0.0)                       # re-binarize the downsampled label
import skimage.measure

x = skimage.measure.block_reduce(x, (9, 9, 9), np.max)  # max over 9x9x9 blocks: a nodule survives if any voxel is positive
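A toy example of why the max-reduction is used for labels: a single positive voxel survives the block reduction, whereas interpolation-based downsampling can wash small nodules out entirely:

x = np.zeros((9, 9, 9))
x[4, 4, 4] = 1.0                                           # one positive voxel
print(skimage.measure.block_reduce(x, (9, 9, 9), np.max))  # [[[1.]]] -- still positive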
[Figure slides: CASED curriculum sampling alternates between 𝒈𝒏, which draws nodule patches only, and 𝒈𝒓, which draws the hardest patches from the full scan.]
from heapq import nlargest
from random import uniform

p_x = 1.0
for i in range(iteration):
    p = uniform(0, 1)
    if p <= p_x:
        # curriculum: only nodule patches (g_n)
        g_n_index = np.random.choice(N, size=batch_size, replace=False)
        batch_patch = nodule_patch[g_n_index]
        batch_y = nodule_patch_y[g_n_index]
    else:
        # adaptive: hardest patches from all patches (g_r)
        predictor_dict = Predictor(all_patch)  # key = index, value = loss
        g_r_index = nlargest(batch_size, predictor_dict, key=predictor_dict.get)
        batch_patch = all_patch[g_r_index]
        batch_y = all_patch_y[g_r_index]
    p_x *= pow(1/M, 1/iteration)  # p_x -> 0 as training progresses
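Note the decay rate: multiplying by pow(1/M, 1/iteration) every step gives p_x = (1/M)^(t/iteration) at step t, so sampling starts entirely from nodule patches (p_x = 1) and ends at p_x = 1/M, by which point batches are almost always drawn from the hard patches in 𝒈𝒓.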
[Figure slides: CASED output.]
[Figure slides: exclude.csv, the annotations excluded from false-positive counting in the LUNA16 evaluation.]
from sklearn.metrics import roc_curve

def fp_per_scan(logit, label):
    fp_list = [0.125, 0.25, 0.5, 1, 2, 4, 8]  # LUNA16 FROC operating points (FPs per scan)
    MIN_FROC = 0.125
    MAX_FROC = 8
    logit = np.reshape(logit, -1)
    label = np.reshape(label, -1)
    fpr, tpr, th = roc_curve(label, logit, pos_label=1.0)
    fps = fpr * negative_samples  # negative_samples: negatives per scan, defined elsewhere in the deck
    fps_itp = np.linspace(MIN_FROC, MAX_FROC, num=64)
    sens_itp = np.interp(fps_itp, fps, tpr)
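From here the challenge score (the mean sensitivity at the seven FP-per-scan rates in fp_list) would be, as a sketch:

froc_score = np.mean(np.interp(fp_list, fps, tpr))  # average sensitivity at 0.125 ... 8 FPs/scan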
import h5py
from multiprocessing import Pool

def nodule_hf(idx):
    # each worker re-opens the file; image_patch and get_data_num are module-level globals
    with h5py.File(image_patch, 'r') as hf:
        nodule = hf['nodule'][idx:idx + get_data_num]
    return nodule

process_num = 32
get_data_num = 64
with h5py.File(image_patch, 'r') as fin:
    nodule_range = range(0, len(fin['nodule']), get_data_num)
pool = Pool(processes=process_num)
pool_nodule = pool.map(nodule_hf, nodule_range)
pool.close()
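pool.map returns the chunks in index order, so the patches reassemble with a single concatenate:

nodules = np.concatenate(pool_nodule, axis=0)  # back to one (num_patches, ...) array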

More Related Content

What's hot

Clustering com numpy e cython
Clustering com numpy e cythonClustering com numpy e cython
Clustering com numpy e cythonAnderson Dantas
 
Palestra sobre Collections com Python
Palestra sobre Collections com PythonPalestra sobre Collections com Python
Palestra sobre Collections com Pythonpugpe
 
Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)
Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)
Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)MongoSF
 
Super Advanced Python –act1
Super Advanced Python –act1Super Advanced Python –act1
Super Advanced Python –act1
Ke Wei Louis
 
Haskellで学ぶ関数型言語
Haskellで学ぶ関数型言語Haskellで学ぶ関数型言語
Haskellで学ぶ関数型言語
ikdysfm
 
PHP 5.4
PHP 5.4PHP 5.4
Pre-Bootcamp introduction to Elixir
Pre-Bootcamp introduction to ElixirPre-Bootcamp introduction to Elixir
Pre-Bootcamp introduction to Elixir
Paweł Dawczak
 
Introduction to Groovy
Introduction to GroovyIntroduction to Groovy
Introduction to Groovy
André Faria Gomes
 
Pandas+postgre sql 實作 with code
Pandas+postgre sql 實作 with codePandas+postgre sql 實作 with code
Pandas+postgre sql 實作 with code
Tim Hong
 
The Ring programming language version 1.5.1 book - Part 44 of 180
The Ring programming language version 1.5.1 book - Part 44 of 180The Ring programming language version 1.5.1 book - Part 44 of 180
The Ring programming language version 1.5.1 book - Part 44 of 180
Mahmoud Samir Fayed
 
Introducción a Elixir
Introducción a ElixirIntroducción a Elixir
Introducción a Elixir
Svet Ivantchev
 
Groovy puzzlers jug-moscow-part 2
Groovy puzzlers jug-moscow-part 2Groovy puzzlers jug-moscow-part 2
Groovy puzzlers jug-moscow-part 2
Evgeny Borisov
 
Raspberry Pi à la GroovyFX
Raspberry Pi à la GroovyFXRaspberry Pi à la GroovyFX
Raspberry Pi à la GroovyFX
Stephen Chin
 
The Ring programming language version 1.5.3 book - Part 47 of 184
The Ring programming language version 1.5.3 book - Part 47 of 184The Ring programming language version 1.5.3 book - Part 47 of 184
The Ring programming language version 1.5.3 book - Part 47 of 184
Mahmoud Samir Fayed
 
Go vs C++ - CppRussia 2019 Piter BoF
Go vs C++ - CppRussia 2019 Piter BoFGo vs C++ - CppRussia 2019 Piter BoF
Go vs C++ - CppRussia 2019 Piter BoF
Timur Safin
 
The Ring programming language version 1.3 book - Part 37 of 88
The Ring programming language version 1.3 book - Part 37 of 88The Ring programming language version 1.3 book - Part 37 of 88
The Ring programming language version 1.3 book - Part 37 of 88
Mahmoud Samir Fayed
 

What's hot (19)

Clustering com numpy e cython
Clustering com numpy e cythonClustering com numpy e cython
Clustering com numpy e cython
 
Palestra sobre Collections com Python
Palestra sobre Collections com PythonPalestra sobre Collections com Python
Palestra sobre Collections com Python
 
Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)
Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)
Map/reduce, geospatial indexing, and other cool features (Kristina Chodorow)
 
Super Advanced Python –act1
Super Advanced Python –act1Super Advanced Python –act1
Super Advanced Python –act1
 
Calvix python
Calvix pythonCalvix python
Calvix python
 
Haskellで学ぶ関数型言語
Haskellで学ぶ関数型言語Haskellで学ぶ関数型言語
Haskellで学ぶ関数型言語
 
PHP 5.4
PHP 5.4PHP 5.4
PHP 5.4
 
Pre-Bootcamp introduction to Elixir
Pre-Bootcamp introduction to ElixirPre-Bootcamp introduction to Elixir
Pre-Bootcamp introduction to Elixir
 
Introduction to Groovy
Introduction to GroovyIntroduction to Groovy
Introduction to Groovy
 
Pandas+postgre sql 實作 with code
Pandas+postgre sql 實作 with codePandas+postgre sql 實作 with code
Pandas+postgre sql 實作 with code
 
Intro
IntroIntro
Intro
 
The Ring programming language version 1.5.1 book - Part 44 of 180
The Ring programming language version 1.5.1 book - Part 44 of 180The Ring programming language version 1.5.1 book - Part 44 of 180
The Ring programming language version 1.5.1 book - Part 44 of 180
 
Introducción a Elixir
Introducción a ElixirIntroducción a Elixir
Introducción a Elixir
 
Groovy puzzlers jug-moscow-part 2
Groovy puzzlers jug-moscow-part 2Groovy puzzlers jug-moscow-part 2
Groovy puzzlers jug-moscow-part 2
 
Raspberry Pi à la GroovyFX
Raspberry Pi à la GroovyFXRaspberry Pi à la GroovyFX
Raspberry Pi à la GroovyFX
 
The Ring programming language version 1.5.3 book - Part 47 of 184
The Ring programming language version 1.5.3 book - Part 47 of 184The Ring programming language version 1.5.3 book - Part 47 of 184
The Ring programming language version 1.5.3 book - Part 47 of 184
 
Go vs C++ - CppRussia 2019 Piter BoF
Go vs C++ - CppRussia 2019 Piter BoFGo vs C++ - CppRussia 2019 Piter BoF
Go vs C++ - CppRussia 2019 Piter BoF
 
The Ring programming language version 1.3 book - Part 37 of 88
The Ring programming language version 1.3 book - Part 37 of 88The Ring programming language version 1.3 book - Part 37 of 88
The Ring programming language version 1.3 book - Part 37 of 88
 
Ass2 1 (2)
Ass2 1 (2)Ass2 1 (2)
Ass2 1 (2)
 

Similar to 밑바닥부터 시작하는 의료 AI

You are task to add a yawning detection to the programme below;i.pdf
You are task to add a yawning detection to the programme below;i.pdfYou are task to add a yawning detection to the programme below;i.pdf
You are task to add a yawning detection to the programme below;i.pdf
sales223546
 
Will upvote Please fix the following code and post your inputs and o.pdf
Will upvote Please fix the following code and post your inputs and o.pdfWill upvote Please fix the following code and post your inputs and o.pdf
Will upvote Please fix the following code and post your inputs and o.pdf
info335653
 
Python profiling
Python profilingPython profiling
Python profiling
dreampuf
 
Please help this code is supposed to evaluate current node state and i.pdf
Please help this code is supposed to evaluate current node state and i.pdfPlease help this code is supposed to evaluate current node state and i.pdf
Please help this code is supposed to evaluate current node state and i.pdf
climatecontrolsv
 
Python decorators (中文)
Python decorators (中文)Python decorators (中文)
Python decorators (中文)
Yiwei Chen
 
R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...
R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...
R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...Revolution Analytics
 
circ.db.dbcircleserver(1).py#!usrlocalbinpython3im.docx
circ.db.dbcircleserver(1).py#!usrlocalbinpython3im.docxcirc.db.dbcircleserver(1).py#!usrlocalbinpython3im.docx
circ.db.dbcircleserver(1).py#!usrlocalbinpython3im.docx
christinemaritza
 
Implement the following sorting algorithms Bubble Sort Insertion S.pdf
Implement the following sorting algorithms  Bubble Sort  Insertion S.pdfImplement the following sorting algorithms  Bubble Sort  Insertion S.pdf
Implement the following sorting algorithms Bubble Sort Insertion S.pdf
kesav24
 
Fix this code so that it will run with proper answers, please dont u.pdf
Fix this code so that it will run with proper answers, please dont u.pdfFix this code so that it will run with proper answers, please dont u.pdf
Fix this code so that it will run with proper answers, please dont u.pdf
txkev
 
Please help me fix this code! will upvote. The code needs to produce .pdf
Please help me fix this code! will upvote.  The code needs to produce .pdfPlease help me fix this code! will upvote.  The code needs to produce .pdf
Please help me fix this code! will upvote. The code needs to produce .pdf
climatecontrolsv
 
Python Tidbits
Python TidbitsPython Tidbits
Python Tidbits
Mitchell Vitez
 
Chapter 02 functions -class xii
Chapter 02   functions -class xiiChapter 02   functions -class xii
Chapter 02 functions -class xii
Praveen M Jigajinni
 
Introduction to Python
Introduction to PythonIntroduction to Python
Introduction to Python
UC San Diego
 
Wrangle 2016: (Lightning Talk) FizzBuzz in TensorFlow
Wrangle 2016: (Lightning Talk) FizzBuzz in TensorFlowWrangle 2016: (Lightning Talk) FizzBuzz in TensorFlow
Wrangle 2016: (Lightning Talk) FizzBuzz in TensorFlow
WrangleConf
 
Corona sdk
Corona sdkCorona sdk
Using-Python-Libraries.9485146.powerpoint.pptx
Using-Python-Libraries.9485146.powerpoint.pptxUsing-Python-Libraries.9485146.powerpoint.pptx
Using-Python-Libraries.9485146.powerpoint.pptx
UadAccount
 
Building Real Time Systems on MongoDB Using the Oplog at Stripe
Building Real Time Systems on MongoDB Using the Oplog at StripeBuilding Real Time Systems on MongoDB Using the Oplog at Stripe
Building Real Time Systems on MongoDB Using the Oplog at StripeMongoDB
 
Can you fix the errors- It isn't working when I try to run import s.pdf
Can you fix the errors- It isn't working when I try to run    import s.pdfCan you fix the errors- It isn't working when I try to run    import s.pdf
Can you fix the errors- It isn't working when I try to run import s.pdf
aksachdevahosymills
 

Similar to 밑바닥부터 시작하는 의료 AI (20)

You are task to add a yawning detection to the programme below;i.pdf
You are task to add a yawning detection to the programme below;i.pdfYou are task to add a yawning detection to the programme below;i.pdf
You are task to add a yawning detection to the programme below;i.pdf
 
Will upvote Please fix the following code and post your inputs and o.pdf
Will upvote Please fix the following code and post your inputs and o.pdfWill upvote Please fix the following code and post your inputs and o.pdf
Will upvote Please fix the following code and post your inputs and o.pdf
 
Python profiling
Python profilingPython profiling
Python profiling
 
Please help this code is supposed to evaluate current node state and i.pdf
Please help this code is supposed to evaluate current node state and i.pdfPlease help this code is supposed to evaluate current node state and i.pdf
Please help this code is supposed to evaluate current node state and i.pdf
 
Python decorators (中文)
Python decorators (中文)Python decorators (中文)
Python decorators (中文)
 
R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...
R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...
R + Hadoop = Big Data Analytics. How Revolution Analytics' RHadoop Project Al...
 
circ.db.dbcircleserver(1).py#!usrlocalbinpython3im.docx
circ.db.dbcircleserver(1).py#!usrlocalbinpython3im.docxcirc.db.dbcircleserver(1).py#!usrlocalbinpython3im.docx
circ.db.dbcircleserver(1).py#!usrlocalbinpython3im.docx
 
Implement the following sorting algorithms Bubble Sort Insertion S.pdf
Implement the following sorting algorithms  Bubble Sort  Insertion S.pdfImplement the following sorting algorithms  Bubble Sort  Insertion S.pdf
Implement the following sorting algorithms Bubble Sort Insertion S.pdf
 
RHadoop の紹介
RHadoop の紹介RHadoop の紹介
RHadoop の紹介
 
Fix this code so that it will run with proper answers, please dont u.pdf
Fix this code so that it will run with proper answers, please dont u.pdfFix this code so that it will run with proper answers, please dont u.pdf
Fix this code so that it will run with proper answers, please dont u.pdf
 
Please help me fix this code! will upvote. The code needs to produce .pdf
Please help me fix this code! will upvote.  The code needs to produce .pdfPlease help me fix this code! will upvote.  The code needs to produce .pdf
Please help me fix this code! will upvote. The code needs to produce .pdf
 
Python Tidbits
Python TidbitsPython Tidbits
Python Tidbits
 
Chapter 02 functions -class xii
Chapter 02   functions -class xiiChapter 02   functions -class xii
Chapter 02 functions -class xii
 
Introduction to Python
Introduction to PythonIntroduction to Python
Introduction to Python
 
Wrangle 2016: (Lightning Talk) FizzBuzz in TensorFlow
Wrangle 2016: (Lightning Talk) FizzBuzz in TensorFlowWrangle 2016: (Lightning Talk) FizzBuzz in TensorFlow
Wrangle 2016: (Lightning Talk) FizzBuzz in TensorFlow
 
Intro to Python
Intro to PythonIntro to Python
Intro to Python
 
Corona sdk
Corona sdkCorona sdk
Corona sdk
 
Using-Python-Libraries.9485146.powerpoint.pptx
Using-Python-Libraries.9485146.powerpoint.pptxUsing-Python-Libraries.9485146.powerpoint.pptx
Using-Python-Libraries.9485146.powerpoint.pptx
 
Building Real Time Systems on MongoDB Using the Oplog at Stripe
Building Real Time Systems on MongoDB Using the Oplog at StripeBuilding Real Time Systems on MongoDB Using the Oplog at Stripe
Building Real Time Systems on MongoDB Using the Oplog at Stripe
 
Can you fix the errors- It isn't working when I try to run import s.pdf
Can you fix the errors- It isn't working when I try to run    import s.pdfCan you fix the errors- It isn't working when I try to run    import s.pdf
Can you fix the errors- It isn't working when I try to run import s.pdf
 

More from NAVER Engineering

React vac pattern
React vac patternReact vac pattern
React vac pattern
NAVER Engineering
 
디자인 시스템에 직방 ZUIX
디자인 시스템에 직방 ZUIX디자인 시스템에 직방 ZUIX
디자인 시스템에 직방 ZUIX
NAVER Engineering
 
진화하는 디자인 시스템(걸음마 편)
진화하는 디자인 시스템(걸음마 편)진화하는 디자인 시스템(걸음마 편)
진화하는 디자인 시스템(걸음마 편)
NAVER Engineering
 
서비스 운영을 위한 디자인시스템 프로젝트
서비스 운영을 위한 디자인시스템 프로젝트서비스 운영을 위한 디자인시스템 프로젝트
서비스 운영을 위한 디자인시스템 프로젝트
NAVER Engineering
 
BPL(Banksalad Product Language) 무야호
BPL(Banksalad Product Language) 무야호BPL(Banksalad Product Language) 무야호
BPL(Banksalad Product Language) 무야호
NAVER Engineering
 
이번 생에 디자인 시스템은 처음이라
이번 생에 디자인 시스템은 처음이라이번 생에 디자인 시스템은 처음이라
이번 생에 디자인 시스템은 처음이라
NAVER Engineering
 
날고 있는 여러 비행기 넘나 들며 정비하기
날고 있는 여러 비행기 넘나 들며 정비하기날고 있는 여러 비행기 넘나 들며 정비하기
날고 있는 여러 비행기 넘나 들며 정비하기
NAVER Engineering
 
쏘카프레임 구축 배경과 과정
 쏘카프레임 구축 배경과 과정 쏘카프레임 구축 배경과 과정
쏘카프레임 구축 배경과 과정
NAVER Engineering
 
플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기
플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기
플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기
NAVER Engineering
 
200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)
200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)
200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)
NAVER Engineering
 
200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드
200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드
200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드
NAVER Engineering
 
200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기
200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기
200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기
NAVER Engineering
 
200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활
200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활
200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활
NAVER Engineering
 
200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출
200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출
200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출
NAVER Engineering
 
200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우
200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우
200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우
NAVER Engineering
 
200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...
200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...
200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...
NAVER Engineering
 
200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법
200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법
200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법
NAVER Engineering
 
200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며
200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며
200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며
NAVER Engineering
 
200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기
200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기
200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기
NAVER Engineering
 
200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기
200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기
200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기
NAVER Engineering
 

More from NAVER Engineering (20)

React vac pattern
React vac patternReact vac pattern
React vac pattern
 
디자인 시스템에 직방 ZUIX
디자인 시스템에 직방 ZUIX디자인 시스템에 직방 ZUIX
디자인 시스템에 직방 ZUIX
 
진화하는 디자인 시스템(걸음마 편)
진화하는 디자인 시스템(걸음마 편)진화하는 디자인 시스템(걸음마 편)
진화하는 디자인 시스템(걸음마 편)
 
서비스 운영을 위한 디자인시스템 프로젝트
서비스 운영을 위한 디자인시스템 프로젝트서비스 운영을 위한 디자인시스템 프로젝트
서비스 운영을 위한 디자인시스템 프로젝트
 
BPL(Banksalad Product Language) 무야호
BPL(Banksalad Product Language) 무야호BPL(Banksalad Product Language) 무야호
BPL(Banksalad Product Language) 무야호
 
이번 생에 디자인 시스템은 처음이라
이번 생에 디자인 시스템은 처음이라이번 생에 디자인 시스템은 처음이라
이번 생에 디자인 시스템은 처음이라
 
날고 있는 여러 비행기 넘나 들며 정비하기
날고 있는 여러 비행기 넘나 들며 정비하기날고 있는 여러 비행기 넘나 들며 정비하기
날고 있는 여러 비행기 넘나 들며 정비하기
 
쏘카프레임 구축 배경과 과정
 쏘카프레임 구축 배경과 과정 쏘카프레임 구축 배경과 과정
쏘카프레임 구축 배경과 과정
 
플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기
플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기
플랫폼 디자이너 없이 디자인 시스템을 구축하는 프로덕트 디자이너의 우당탕탕 고통 연대기
 
200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)
200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)
200820 NAVER TECH CONCERT 15_Code Review is Horse(코드리뷰는 말이야)(feat.Latte)
 
200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드
200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드
200819 NAVER TECH CONCERT 03_화려한 코루틴이 내 앱을 감싸네! 코루틴으로 작성해보는 깔끔한 비동기 코드
 
200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기
200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기
200819 NAVER TECH CONCERT 10_맥북에서도 아이맥프로에서 빌드하는 것처럼 빌드 속도 빠르게 하기
 
200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활
200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활
200819 NAVER TECH CONCERT 08_성능을 고민하는 슬기로운 개발자 생활
 
200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출
200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출
200819 NAVER TECH CONCERT 05_모르면 손해보는 Android 디버깅/분석 꿀팁 대방출
 
200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우
200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우
200819 NAVER TECH CONCERT 09_Case.xcodeproj - 좋은 동료로 거듭나기 위한 노하우
 
200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...
200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...
200820 NAVER TECH CONCERT 14_야 너두 할 수 있어. 비전공자, COBOL 개발자를 거쳐 네이버에서 FE 개발하게 된...
 
200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법
200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법
200820 NAVER TECH CONCERT 13_네이버에서 오픈 소스 개발을 통해 성장하는 방법
 
200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며
200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며
200820 NAVER TECH CONCERT 12_상반기 네이버 인턴을 돌아보며
 
200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기
200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기
200820 NAVER TECH CONCERT 11_빠르게 성장하는 슈퍼루키로 거듭나기
 
200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기
200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기
200819 NAVER TECH CONCERT 07_신입 iOS 개발자 개발업무 적응기
 

Recently uploaded

GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...
GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...
GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...
James Anderson
 
UiPath Test Automation using UiPath Test Suite series, part 4
UiPath Test Automation using UiPath Test Suite series, part 4UiPath Test Automation using UiPath Test Suite series, part 4
UiPath Test Automation using UiPath Test Suite series, part 4
DianaGray10
 
PCI PIN Basics Webinar from the Controlcase Team
PCI PIN Basics Webinar from the Controlcase TeamPCI PIN Basics Webinar from the Controlcase Team
PCI PIN Basics Webinar from the Controlcase Team
ControlCase
 
Assuring Contact Center Experiences for Your Customers With ThousandEyes
Assuring Contact Center Experiences for Your Customers With ThousandEyesAssuring Contact Center Experiences for Your Customers With ThousandEyes
Assuring Contact Center Experiences for Your Customers With ThousandEyes
ThousandEyes
 
Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...
Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...
Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...
Ramesh Iyer
 
Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...
Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...
Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...
UiPathCommunity
 
Leading Change strategies and insights for effective change management pdf 1.pdf
Leading Change strategies and insights for effective change management pdf 1.pdfLeading Change strategies and insights for effective change management pdf 1.pdf
Leading Change strategies and insights for effective change management pdf 1.pdf
OnBoard
 
FIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdf
FIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdfFIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdf
FIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdf
FIDO Alliance
 
Generating a custom Ruby SDK for your web service or Rails API using Smithy
Generating a custom Ruby SDK for your web service or Rails API using SmithyGenerating a custom Ruby SDK for your web service or Rails API using Smithy
Generating a custom Ruby SDK for your web service or Rails API using Smithy
g2nightmarescribd
 
Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024
Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024
Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024
Tobias Schneck
 
Smart TV Buyer Insights Survey 2024 by 91mobiles.pdf
Smart TV Buyer Insights Survey 2024 by 91mobiles.pdfSmart TV Buyer Insights Survey 2024 by 91mobiles.pdf
Smart TV Buyer Insights Survey 2024 by 91mobiles.pdf
91mobiles
 
Bits & Pixels using AI for Good.........
Bits & Pixels using AI for Good.........Bits & Pixels using AI for Good.........
Bits & Pixels using AI for Good.........
Alison B. Lowndes
 
Designing Great Products: The Power of Design and Leadership by Chief Designe...
Designing Great Products: The Power of Design and Leadership by Chief Designe...Designing Great Products: The Power of Design and Leadership by Chief Designe...
Designing Great Products: The Power of Design and Leadership by Chief Designe...
Product School
 
Essentials of Automations: Optimizing FME Workflows with Parameters
Essentials of Automations: Optimizing FME Workflows with ParametersEssentials of Automations: Optimizing FME Workflows with Parameters
Essentials of Automations: Optimizing FME Workflows with Parameters
Safe Software
 
FIDO Alliance Osaka Seminar: Overview.pdf
FIDO Alliance Osaka Seminar: Overview.pdfFIDO Alliance Osaka Seminar: Overview.pdf
FIDO Alliance Osaka Seminar: Overview.pdf
FIDO Alliance
 
UiPath Test Automation using UiPath Test Suite series, part 3
UiPath Test Automation using UiPath Test Suite series, part 3UiPath Test Automation using UiPath Test Suite series, part 3
UiPath Test Automation using UiPath Test Suite series, part 3
DianaGray10
 
Securing your Kubernetes cluster_ a step-by-step guide to success !
Securing your Kubernetes cluster_ a step-by-step guide to success !Securing your Kubernetes cluster_ a step-by-step guide to success !
Securing your Kubernetes cluster_ a step-by-step guide to success !
KatiaHIMEUR1
 
De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...
De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...
De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...
Product School
 
JMeter webinar - integration with InfluxDB and Grafana
JMeter webinar - integration with InfluxDB and GrafanaJMeter webinar - integration with InfluxDB and Grafana
JMeter webinar - integration with InfluxDB and Grafana
RTTS
 
FIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdf
FIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdfFIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdf
FIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdf
FIDO Alliance
 

Recently uploaded (20)

GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...
GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...
GDG Cloud Southlake #33: Boule & Rebala: Effective AppSec in SDLC using Deplo...
 
UiPath Test Automation using UiPath Test Suite series, part 4
UiPath Test Automation using UiPath Test Suite series, part 4UiPath Test Automation using UiPath Test Suite series, part 4
UiPath Test Automation using UiPath Test Suite series, part 4
 
PCI PIN Basics Webinar from the Controlcase Team
PCI PIN Basics Webinar from the Controlcase TeamPCI PIN Basics Webinar from the Controlcase Team
PCI PIN Basics Webinar from the Controlcase Team
 
Assuring Contact Center Experiences for Your Customers With ThousandEyes
Assuring Contact Center Experiences for Your Customers With ThousandEyesAssuring Contact Center Experiences for Your Customers With ThousandEyes
Assuring Contact Center Experiences for Your Customers With ThousandEyes
 
Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...
Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...
Builder.ai Founder Sachin Dev Duggal's Strategic Approach to Create an Innova...
 
Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...
Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...
Dev Dives: Train smarter, not harder – active learning and UiPath LLMs for do...
 
Leading Change strategies and insights for effective change management pdf 1.pdf
Leading Change strategies and insights for effective change management pdf 1.pdfLeading Change strategies and insights for effective change management pdf 1.pdf
Leading Change strategies and insights for effective change management pdf 1.pdf
 
FIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdf
FIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdfFIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdf
FIDO Alliance Osaka Seminar: The WebAuthn API and Discoverable Credentials.pdf
 
Generating a custom Ruby SDK for your web service or Rails API using Smithy
Generating a custom Ruby SDK for your web service or Rails API using SmithyGenerating a custom Ruby SDK for your web service or Rails API using Smithy
Generating a custom Ruby SDK for your web service or Rails API using Smithy
 
Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024
Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024
Kubernetes & AI - Beauty and the Beast !?! @KCD Istanbul 2024
 
Smart TV Buyer Insights Survey 2024 by 91mobiles.pdf
Smart TV Buyer Insights Survey 2024 by 91mobiles.pdfSmart TV Buyer Insights Survey 2024 by 91mobiles.pdf
Smart TV Buyer Insights Survey 2024 by 91mobiles.pdf
 
Bits & Pixels using AI for Good.........
Bits & Pixels using AI for Good.........Bits & Pixels using AI for Good.........
Bits & Pixels using AI for Good.........
 
Designing Great Products: The Power of Design and Leadership by Chief Designe...
Designing Great Products: The Power of Design and Leadership by Chief Designe...Designing Great Products: The Power of Design and Leadership by Chief Designe...
Designing Great Products: The Power of Design and Leadership by Chief Designe...
 
Essentials of Automations: Optimizing FME Workflows with Parameters
Essentials of Automations: Optimizing FME Workflows with ParametersEssentials of Automations: Optimizing FME Workflows with Parameters
Essentials of Automations: Optimizing FME Workflows with Parameters
 
FIDO Alliance Osaka Seminar: Overview.pdf
FIDO Alliance Osaka Seminar: Overview.pdfFIDO Alliance Osaka Seminar: Overview.pdf
FIDO Alliance Osaka Seminar: Overview.pdf
 
UiPath Test Automation using UiPath Test Suite series, part 3
UiPath Test Automation using UiPath Test Suite series, part 3UiPath Test Automation using UiPath Test Suite series, part 3
UiPath Test Automation using UiPath Test Suite series, part 3
 
Securing your Kubernetes cluster_ a step-by-step guide to success !
Securing your Kubernetes cluster_ a step-by-step guide to success !Securing your Kubernetes cluster_ a step-by-step guide to success !
Securing your Kubernetes cluster_ a step-by-step guide to success !
 
De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...
De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...
De-mystifying Zero to One: Design Informed Techniques for Greenfield Innovati...
 
JMeter webinar - integration with InfluxDB and Grafana
JMeter webinar - integration with InfluxDB and GrafanaJMeter webinar - integration with InfluxDB and Grafana
JMeter webinar - integration with InfluxDB and Grafana
 
FIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdf
FIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdfFIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdf
FIDO Alliance Osaka Seminar: Passkeys and the Road Ahead.pdf
 

밑바닥부터 시작하는 의료 AI

  • 1. 밑바닥부터 시작하는 의료AI 루닛 (Lunit) 김준호 https://github.com/taki0112
  • 2. 2
  • 3. 3
  • 4. 4
  • 6. 6
  • 7. 7
  • 8. 8
  • 9. 9
  • 10. 10
  • 11. 11
  • 12. 12
  • 13. 13
  • 14. 14
  • 15. 15
  • 16. 16
  • 17. 17
  • 18. 18
  • 19. 19 def read_csv(filename): lines = [] with open(filename, 'r') as f: csvreader = csv.reader(f) for line in csvreader: lines.append(line) lines = lines[1:] # remove csv headers annotations_dict = {} for i in lines: series_uid, x, y, z, diameter = i value = {'position':[float(x),float(y),float(z)], 'diameter':float(diameter)} if series_uid in annotations_dict.keys(): annotations_dict[series_uid].append(value) else: annotations_dict[series_uid] = [value] return annotations_dict
  • 20. 20
  • 21. 21
  • 22. 22 def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = sitk.GetArrayFromImage(itkimage) numpyOrigin = np.array(list(reversed(itkimage.GetOrigin()))) numpySpacing = np.array(list(reversed(itkimage.GetSpacing()))) return numpyImage, numpyOrigin, numpySpacing
  • 23. 23 def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = sitk.GetArrayFromImage(itkimage) numpyOrigin = np.array(list(reversed(itkimage.GetOrigin()))) numpySpacing = np.array(list(reversed(itkimage.GetSpacing()))) return numpyImage, numpyOrigin, numpySpacing
  • 24. 24 def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = sitk.GetArrayFromImage(itkimage) numpyOrigin = np.array(list(reversed(itkimage.GetOrigin()))) numpySpacing = np.array(list(reversed(itkimage.GetSpacing()))) return numpyImage, numpyOrigin, numpySpacing
  • 25. 25 def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = sitk.GetArrayFromImage(itkimage) numpyOrigin = np.array(list(reversed(itkimage.GetOrigin()))) numpySpacing = np.array(list(reversed(itkimage.GetSpacing()))) return numpyImage, numpyOrigin, numpySpacing def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = np.transpose(sitk.GetArrayFromImage(itkimage)) numpyOrigin = np.array(itkimage.GetOrigin()) numpySpacing = np.array(itkimage.GetSpacing()) return numpyImage, numpyOrigin, numpySpacing
  • 26. 26 def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = sitk.GetArrayFromImage(itkimage) numpyOrigin = np.array(list(reversed(itkimage.GetOrigin()))) numpySpacing = np.array(list(reversed(itkimage.GetSpacing()))) return numpyImage, numpyOrigin, numpySpacing def load_itk_image(filename): itkimage = sitk.ReadImage(filename) numpyImage = np.transpose(sitk.GetArrayFromImage(itkimage)) numpyOrigin = np.array(itkimage.GetOrigin()) numpySpacing = np.array(itkimage.GetSpacing()) return numpyImage, numpyOrigin, numpySpacing
  • 27. 27
  • 28. 28
  • 29. 29
  • 30. 30
  • 31. 31
  • 32. 32
  • 33. 33
  • 34. Resampling every scan to a common voxel spacing. As printed on the slide, `real_resize_factor` was used but never defined; it is the ratio between the rounded target shape and the original shape:

        import numpy as np
        import scipy.ndimage

        def resample(image, org_spacing, new_spacing=OUTPUT_SPACING):
            resize_factor = org_spacing / new_spacing
            new_real_shape = image.shape * resize_factor
            new_shape = np.round(new_real_shape)
            real_resize_factor = new_shape / image.shape  # missing on the slide
            # scipy.ndimage.interpolation.zoom in older SciPy, as on the slide
            image = scipy.ndimage.zoom(image, real_resize_factor, mode='nearest')
            return image
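    A shape sanity check against the OUTPUT_SPACING = [1.25, 1.25, 1.25] used later in the deck (input spacing values are illustrative):

        # a scan with 2.5 mm slices and 0.625 mm in-plane pixels
        image = np.zeros((100, 512, 512))
        resampled = resample(image,
                             org_spacing=np.array([2.5, 0.625, 0.625]),
                             new_spacing=np.array([1.25, 1.25, 1.25]))
        print(resampled.shape)  # (200, 256, 256): each axis scales by org/new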
  • 45. Intensity preprocessing: clip Hounsfield units to the window [-1000, 400], rescale to [0, 1], then zero-center with a fixed pixel mean:

        def normalize_planes(npzarray):
            maxHU = 400.
            minHU = -1000.
            npzarray = (npzarray - minHU) / (maxHU - minHU)
            npzarray[npzarray > 1] = 1.
            npzarray[npzarray < 0] = 0.
            return npzarray

        def zero_center(image):
            PIXEL_MEAN = 0.25
            image = image - PIXEL_MEAN
            return image
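    The clip-and-rescale step can be written more compactly with np.clip; the behavior is identical, since (x - (-1000)) / (400 - (-1000)) = (x + 1000) / 1400:

        import numpy as np

        def normalize_planes(npzarray):
            # map [-1000, 400] HU linearly onto [0, 1], clipping outside the window
            return np.clip((npzarray + 1000.) / 1400., 0., 1.)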
  • 51. Putting the pieces together (function bodies elided on the slide):

        def load_itk_image(filename):
            …
            return numpyImage, numpyOrigin, numpySpacing

        def read_csv(filename):
            …
            return annotations_dict

        OUTPUT_SPACING = [1.25, 1.25, 1.25]
  • 53. Converting a world coordinate (mm) into a voxel index: shift by the scan origin, then divide by the voxel spacing:

        def world_2_voxel(world_coord, origin, spacing):
            stretched_voxel_coord = np.absolute(world_coord - origin)
            voxel_coord = stretched_voxel_coord / spacing
            return voxel_coord
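    A worked example with made-up numbers: a point at world z = -100 mm, in a scan whose origin sits at -200 mm with 1.25 mm spacing, lands on slice 80:

        import numpy as np

        world = np.array([-100.0, 50.0, 60.0])       # (z, y, x) in mm
        origin = np.array([-200.0, -150.0, -150.0])
        spacing = np.array([1.25, 1.25, 1.25])
        print(world_2_voxel(world, origin, spacing))  # [ 80. 160. 168.]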
  • 56. Building a voxel-wise label volume from the annotations. The slide's version has a few slips: `word_2_voxel` is a typo for `world_2_voxel`, `origin` is used but never passed in, and `label` is overwritten on every loop iteration instead of accumulated. A corrected sketch (the `origin` parameter and the logical-OR accumulation are repairs, not on the slide):

        def create_label(arr_shape, nodules, origin, new_spacing, coord=False):
            # nodules = list of dicts {'position', 'diameter'}
            label = np.zeros(arr_shape, dtype=bool)
            for nodule in nodules:
                worldCoord = nodule['position']
                # annotations are (x, y, z); flip to the array's (z, y, x) order
                worldCoord = np.asarray([worldCoord[2], worldCoord[1], worldCoord[0]])
                # new_spacing came from resample
                voxelCoord = world_2_voxel(worldCoord, origin, new_spacing)
                voxelCoord = [int(i) for i in voxelCoord]
                diameter = nodule['diameter'] / new_spacing[1]  # mm -> voxels
                label = np.logical_or(label, _create_mask(arr_shape, voxelCoord, diameter))
            return label

  • 60. The mask itself is a solid sphere: np.ogrid produces open grids of signed offsets centered on the nodule, and the inequality keeps every voxel within the radius. [Slide figure: a 1-D strip of offsets -2 -1 x 1 2 and the resulting False True x True False mask around the center voxel.]

        def _create_mask(arr_shape, position, diameter):
            z_dim, y_dim, x_dim = arr_shape
            z_pos, y_pos, x_pos = position
            # open grids of signed offsets from the nodule center along each axis
            z, y, x = np.ogrid[-z_pos:z_dim - z_pos, -y_pos:y_dim - y_pos, -x_pos:x_dim - x_pos]
            mask = z**2 + y**2 + x**2 <= int(diameter // 2)**2
            return mask
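    A tiny sanity check of the sphere mask (shape and position are arbitrary):

        mask = _create_mask((20, 20, 20), position=(10, 10, 10), diameter=6.0)
        print(mask.shape)        # (20, 20, 20)
        print(mask.sum())        # 123 lattice points within radius 3 (continuous volume ≈ 113)
        print(mask[10, 10, 10])  # True: the center voxel is inside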
  • 74. Pad the volume before patch extraction so a patch centered on any voxel — including border voxels — stays inside the array. Padding with the image minimum mimics the air/background value:

        offset = patch_size // 2
        image = np.pad(image, offset, 'constant', constant_values=np.min(image))
  • 80. For dense sliding-window inference with stride 8, pad a little further so the stride grid also reaches the borders:

        offset = patch_size // 2
        stride = 8
        move = offset // stride
        image = np.pad(image, offset + (stride * move), 'constant', constant_values=np.min(image))
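    A minimal sketch of the sliding-window loop this padding enables; patch_size and the detector call are placeholders, not from the slides:

        patch_size = 68  # hypothetical 3-D patch side
        offset = patch_size // 2
        stride = 8
        padded = np.pad(image, offset + stride * (offset // stride),
                        'constant', constant_values=np.min(image))
        for z in range(0, padded.shape[0] - patch_size + 1, stride):
            for y in range(0, padded.shape[1] - patch_size + 1, stride):
                for x in range(0, padded.shape[2] - patch_size + 1, stride):
                    patch = padded[z:z + patch_size, y:y + patch_size, x:x + patch_size]
                    # feed `patch` to the detector here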
  • 90. Saving each preprocessed subset to HDF5, with LZF compression for fast reads:

        import h5py

        with h5py.File(save_path + 'subset' + str(i) + '.h5', 'w') as hf:
            hf.create_dataset('nodule', data=nodule[:], compression='lzf')
            hf.create_dataset('label_nodule', data=nodule_label[:], compression='lzf')
            hf.create_dataset('non_nodule', data=non_nodule[:], compression='lzf')
            hf.create_dataset('label_non_nodule', data=non_nodule_label[:], compression='lzf')
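    Reading back is the mirror image; slicing an h5py dataset loads only that slice from disk, so you never have to pull the whole subset into memory (the path is hypothetical):

        with h5py.File('/data/LUNA16/subset1.h5', 'r') as hf:
            first_100 = hf['nodule'][:100]        # reads only 100 patches
            labels = hf['label_nodule'][:100]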
  • 95. Feeding the HDF5 file into TensorFlow with tftables, which runs the HDF5 reader in a background queue (CASED here is the presenter's model function):

        import tftables
        import tensorflow as tf

        def input_transform(tbl_batch):
            labels = tbl_batch['nodule_label']
            data = tbl_batch['nodule']
            return labels, data

        loader = tftables.load_dataset(filename='/data/LUNA16/subset1.h5',
                                       dataset_path='/internal/h5/path',
                                       input_transform=input_transform,
                                       queue_size=256,
                                       batch_size=16)
        truth_batch, data_batch = loader.dequeue()
        result = CASED(truth_batch, data_batch)

        with tf.Session() as sess:
            with loader.begin(sess):
                for _ in range(num_iterations):
                    sess.run(result)
  • 112. Downsampling the label volume by 8 to match the network's output resolution. The slides show two options: interpolate and re-binarize, or max-pool with block_reduce. Note that order=1 in scipy's zoom is linear interpolation, not nearest-neighbor as the slide's comment suggested (order=0 would be nearest):

        import scipy.ndimage
        import skimage.measure

        # option 1: trilinear zoom, then threshold back to {0, 1}
        x = scipy.ndimage.zoom(input=x, zoom=1/8, order=1)
        x = np.where(x > 0, 1.0, 0.0)

        # option 2: max-pooling over blocks
        x = skimage.measure.block_reduce(x, (9, 9, 9), np.max)
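    Why max-pooling is the safer choice for sparse binary labels — a 1-D toy case (block size 4 for brevity): an isolated positive voxel always survives in its block, whereas interpolation can smear it below threshold:

        import numpy as np
        import skimage.measure

        x = np.zeros(16)
        x[5] = 1.0  # one isolated positive voxel
        pooled = skimage.measure.block_reduce(x, (4,), np.max)
        print(pooled)  # [0. 1. 0. 0.] — the positive survives in its block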
  • 129. Curriculum sampling (CASED-style): with probability p_x draw a batch from the nodule sampler 𝒈_𝒏, otherwise from the hard-example sampler 𝒈_𝒓, which picks the patches with the largest current loss. p_x starts at 1 (nodules only) and decays geometrically each step. N, M, iteration, Predictor, and the patch arrays are defined elsewhere in the deck:

        from heapq import nlargest
        from numpy.random import uniform

        p_x = 1.0
        for i in range(iteration):
            p = uniform(0, 1)
            if p <= p_x:  # sample only nodule patches (g_n)
                g_n_index = np.random.choice(N, size=batch_size, replace=False)
                batch_patch = nodule_patch[g_n_index]
                batch_y = nodule_patch_y[g_n_index]
            else:  # sample the hardest patches overall (g_r)
                predictor_dict = Predictor(all_patch)  # key = index, value = loss
                g_r_index = nlargest(batch_size, predictor_dict, key=predictor_dict.get)
                batch_patch = all_patch[g_r_index]
                batch_y = all_patch_y[g_r_index]
            # after `iteration` steps p_x has decayed to 1/M (≈ 0 for large M)
            p_x *= pow(1/M, 1/iteration)
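    The decay schedule in isolation: multiplying by (1/M)^(1/iteration) each step for `iteration` steps leaves exactly 1/M (numbers illustrative):

        M, iteration = 1000, 50000
        p_x = 1.0
        for _ in range(iteration):
            p_x *= pow(1/M, 1/iteration)
        print(p_x)  # ≈ 0.001 == 1/M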
  • 145. Computing the FROC metric — sensitivity at fixed false-positive-per-scan rates, with the annotations in exclude.csv left out of scoring. The slide's snippet stops after the interpolation; the final line below, which averages sensitivity over the seven operating points, is an assumed completion, and `negative_samples` is assumed to be defined elsewhere:

        import numpy as np
        from sklearn.metrics import roc_curve

        def fp_per_scan(logit, label):
            fp_list = [0.125, 0.25, 0.5, 1, 2, 4, 8]
            MIN_FROC = 0.125
            MAX_FROC = 8

            logit = np.reshape(logit, -1)
            label = np.reshape(label, -1)

            fpr, tpr, th = roc_curve(label, logit, pos_label=1.0)
            fps = fpr * negative_samples  # scale FP rate to false positives per scan
            fps_itp = np.linspace(MIN_FROC, MAX_FROC, num=64)
            sens_itp = np.interp(fps_itp, fps, tpr)
            # (assumed completion) average sensitivity at the seven FP/scan points
            return np.mean(np.interp(fp_list, fps, tpr))
  • 169. Reading patches back out of HDF5 in parallel: each worker opens its own file handle (h5py handles are not safe to share across processes) and pulls one chunk of 64 patches:

        from multiprocessing import Pool
        import h5py

        process_num = 32   # worker processes
        get_data_num = 64  # patches per chunk

        def nodule_hf(idx):
            # each worker opens the file itself; handles aren't fork-safe
            with h5py.File(image_patch, 'r') as hf:
                nodule = hf['nodule'][idx:idx + get_data_num]
            return nodule

        with h5py.File(image_patch, 'r') as fin:
            nodule_range = range(0, len(fin['nodule']), get_data_num)

        pool = Pool(processes=process_num)
        pool_nodule = pool.map(nodule_hf, nodule_range)
        pool.close()
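    pool.map returns one array per chunk, in order; stitching them back into a single array is one line (and calling pool.join() after close() is good hygiene):

        import numpy as np
        pool.join()
        nodules = np.concatenate(pool_nodule, axis=0)
        print(nodules.shape)  # (total_patches, ...) — all chunks concatenated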