dsets.py

import copy
import csv
import functools
import glob
import math
import os
import random
from collections import namedtuple

import SimpleITK as sitk
import numpy as np
import scipy.ndimage.morphology as morph

import torch
import torch.cuda
from torch.utils.data import Dataset
import torch.nn as nn
import torch.nn.functional as F

from util.disk import getCache
from util.util import XyzTuple, xyz2irc
from util.logconf import logging

log = logging.getLogger(__name__)
# log.setLevel(logging.WARN)
# log.setLevel(logging.INFO)
log.setLevel(logging.DEBUG)

raw_cache = getCache('part2ch13_raw')

NoduleInfoTuple = namedtuple('NoduleInfoTuple', 'isMalignant_bool, diameter_mm, series_uid, center_xyz')
MaskTuple = namedtuple('MaskTuple', 'raw_dense_mask, dense_mask, body_mask, air_mask, raw_nodule_mask, nodule_mask, lung_mask, ben_mask, mal_mask')
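
# getNoduleInfoList merges the LUNA annotations.csv (nodule centers and
# diameters) with candidates.csv (all candidate locations plus the class
# flag) into one list of NoduleInfoTuple instances, sorted so malignant,
# larger-diameter samples come first. A candidate whose center lies within a
# quarter of an annotation's diameter on every axis inherits that diameter;
# everything else keeps a diameter of 0.0.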
@functools.lru_cache(1)
def getNoduleInfoList(requireDataOnDisk_bool=True):
    # We construct a set with all series_uids that are present on disk.
    # This will let us use the data, even if we haven't downloaded all of
    # the subsets yet.
    mhd_list = glob.glob('data-unversioned/part2/luna/subset*/*.mhd')
    dataPresentOnDisk_set = {os.path.split(p)[-1][:-4] for p in mhd_list}

    diameter_dict = {}
    with open('data/part2/luna/annotations.csv', "r") as f:
        for row in list(csv.reader(f))[1:]:
            series_uid = row[0]
            annotationCenter_xyz = tuple([float(x) for x in row[1:4]])
            annotationDiameter_mm = float(row[4])

            diameter_dict.setdefault(series_uid, []).append((annotationCenter_xyz, annotationDiameter_mm))

    noduleInfo_list = []
    with open('data/part2/luna/candidates.csv', "r") as f:
        for row in list(csv.reader(f))[1:]:
            series_uid = row[0]

            if series_uid not in dataPresentOnDisk_set and requireDataOnDisk_bool:
                continue

            isMalignant_bool = bool(int(row[4]))
            candidateCenter_xyz = tuple([float(x) for x in row[1:4]])

            candidateDiameter_mm = 0.0
            for annotationCenter_xyz, annotationDiameter_mm in diameter_dict.get(series_uid, []):
                for i in range(3):
                    delta_mm = abs(candidateCenter_xyz[i] - annotationCenter_xyz[i])
                    if delta_mm > annotationDiameter_mm / 4:
                        break
                else:
                    candidateDiameter_mm = annotationDiameter_mm
                    break

            noduleInfo_list.append(NoduleInfoTuple(isMalignant_bool, candidateDiameter_mm, series_uid, candidateCenter_xyz))

    noduleInfo_list.sort(reverse=True)
    return noduleInfo_list
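
# Ct wraps a single CT series: it loads the .mhd/.raw pair with SimpleITK,
# clamps voxel values to the [-1000, 1000] HU range, records the geometry
# needed by xyz2irc, and builds voxel masks for the benign and malignant
# nodules annotated in that series.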
class Ct(object):
    def __init__(self, series_uid, buildMasks_bool=True):
        mhd_path = glob.glob('data-unversioned/part2/luna/subset*/{}.mhd'.format(series_uid))[0]

        ct_mhd = sitk.ReadImage(mhd_path)
        ct_a = np.array(sitk.GetArrayFromImage(ct_mhd), dtype=np.float32)

        # CTs are natively expressed in https://en.wikipedia.org/wiki/Hounsfield_scale
        # HU are scaled oddly, with 0 g/cc (air, approximately) being -1000 and 1 g/cc (water) being 0.
        # This gets rid of negative density stuff used to indicate out-of-FOV
        ct_a[ct_a < -1000] = -1000

        # This nukes any weird hotspots and clamps bone down
        ct_a[ct_a > 1000] = 1000

        self.series_uid = series_uid
        self.hu_a = ct_a

        self.origin_xyz = XyzTuple(*ct_mhd.GetOrigin())
        self.vxSize_xyz = XyzTuple(*ct_mhd.GetSpacing())
        self.direction_tup = tuple(int(round(x)) for x in ct_mhd.GetDirection())

        noduleInfo_list = getNoduleInfoList()

        self.benignInfo_list = [ni_tup
                                for ni_tup in noduleInfo_list
                                if not ni_tup.isMalignant_bool
                                and ni_tup.series_uid == self.series_uid]
        self.benign_mask = self.buildAnnotationMask(self.benignInfo_list)[0]
        self.benign_indexes = sorted(set(self.benign_mask.nonzero()[0]))

        self.malignantInfo_list = [ni_tup
                                   for ni_tup in noduleInfo_list
                                   if ni_tup.isMalignant_bool
                                   and ni_tup.series_uid == self.series_uid]
        self.malignant_mask = self.buildAnnotationMask(self.malignantInfo_list)[0]
        self.malignant_indexes = sorted(set(self.malignant_mask.nonzero()[0]))
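
    # buildAnnotationMask grows a small bounding box out from each annotated
    # center along each axis until it reaches tissue at or below threshold_hu,
    # marks that box in boundingBox_a, then keeps only the above-threshold
    # voxels inside the boxes and dilates the result slightly.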
    def buildAnnotationMask(self, noduleInfo_list, threshold_hu=-500):
        boundingBox_a = np.zeros_like(self.hu_a, dtype=bool)

        for noduleInfo_tup in noduleInfo_list:
            center_irc = xyz2irc(
                noduleInfo_tup.center_xyz,
                self.origin_xyz,
                self.vxSize_xyz,
                self.direction_tup,
            )
            ci = int(center_irc.index)
            cr = int(center_irc.row)
            cc = int(center_irc.col)

            index_radius = 2
            try:
                while self.hu_a[ci + index_radius, cr, cc] > threshold_hu and \
                        self.hu_a[ci - index_radius, cr, cc] > threshold_hu:
                    index_radius += 1
            except IndexError:
                index_radius -= 1

            row_radius = 2
            try:
                while self.hu_a[ci, cr + row_radius, cc] > threshold_hu and \
                        self.hu_a[ci, cr - row_radius, cc] > threshold_hu:
                    row_radius += 1
            except IndexError:
                row_radius -= 1

            col_radius = 2
            try:
                while self.hu_a[ci, cr, cc + col_radius] > threshold_hu and \
                        self.hu_a[ci, cr, cc - col_radius] > threshold_hu:
                    col_radius += 1
            except IndexError:
                col_radius -= 1

            # assert index_radius > 0, repr([noduleInfo_tup.center_xyz, center_irc, self.hu_a[ci, cr, cc]])
            # assert row_radius > 0
            # assert col_radius > 0

            slice_tup = (
                slice(ci - index_radius, ci + index_radius + 1),
                slice(cr - row_radius, cr + row_radius + 1),
                slice(cc - col_radius, cc + col_radius + 1),
            )
            boundingBox_a[slice_tup] = True

        thresholded_a = boundingBox_a & (self.hu_a > threshold_hu)
        mask_a = morph.binary_dilation(thresholded_a, iterations=2)

        return mask_a, thresholded_a, boundingBox_a
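
    # build2dLungMask derives per-slice masks with classic morphology: threshold
    # for dense tissue, close/open to denoise, fill the body outline, keep the
    # air pockets inside the body as lung candidates, then intersect a softer
    # threshold with that air mask to get nodule candidates. Benign voxels are
    # whatever remains after removing the malignant annotation mask.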
    def build2dLungMask(self, mask_ndx):
        raw_dense_mask = self.hu_a[mask_ndx] > -300
        dense_mask = morph.binary_closing(raw_dense_mask, iterations=2)
        dense_mask = morph.binary_opening(dense_mask, iterations=2)
        body_mask = morph.binary_fill_holes(dense_mask)
        air_mask = morph.binary_fill_holes(body_mask & ~dense_mask)
        air_mask = morph.binary_erosion(air_mask, iterations=1)
        lung_mask = morph.binary_dilation(air_mask, iterations=5)

        raw_nodule_mask = self.hu_a[mask_ndx] > -600
        raw_nodule_mask &= air_mask
        nodule_mask = morph.binary_opening(raw_nodule_mask, iterations=1)

        ben_mask = morph.binary_dilation(nodule_mask, iterations=1)
        ben_mask &= ~self.malignant_mask[mask_ndx]
        mal_mask = self.malignant_mask[mask_ndx]

        return MaskTuple(
            raw_dense_mask,
            dense_mask,
            body_mask,
            air_mask,
            raw_nodule_mask,
            nodule_mask,
            lung_mask,
            ben_mask,
            mal_mask,
        )

    # def build3dLungMask(self):
    #     air_mask, lung_mask, dense_mask, denoise_mask, body_mask, ben_mask, mal_mask = mask_list = \
    #         [np.zeros_like(self.hu_a, dtype=np.bool) for _ in range(6)]
    #
    #     for mask_ndx in range(self.hu_a.shape[0]):
    #         for i, mask_a in enumerate(self.build2dLungMask(mask_ndx)):
    #             mask_list[i][mask_ndx] = mask_a
    #
    #     return MaskTuple(air_mask, lung_mask, dense_mask, denoise_mask, body_mask, ben_mask, mal_mask)

    def getRawNodule(self, center_xyz, width_irc):
        center_irc = xyz2irc(center_xyz, self.origin_xyz, self.vxSize_xyz, self.direction_tup)

        slice_list = []
        for axis, center_val in enumerate(center_irc):
            try:
                start_ndx = int(round(center_val - width_irc[axis]/2))
            except:
                log.debug([center_val, width_irc, center_xyz, center_irc])
                raise
            end_ndx = int(start_ndx + width_irc[axis])

            assert center_val >= 0 and center_val < self.hu_a.shape[axis], repr([self.series_uid, center_xyz, self.origin_xyz, self.vxSize_xyz, center_irc, axis])

            if start_ndx < 0:
                # log.warning("Crop outside of CT array: {} {}, center:{} shape:{} width:{}".format(
                #     self.series_uid, center_xyz, center_irc, self.hu_a.shape, width_irc))
                start_ndx = 0
                end_ndx = int(width_irc[axis])

            if end_ndx > self.hu_a.shape[axis]:
                # log.warning("Crop outside of CT array: {} {}, center:{} shape:{} width:{}".format(
                #     self.series_uid, center_xyz, center_irc, self.hu_a.shape, width_irc))
                end_ndx = self.hu_a.shape[axis]
                start_ndx = int(self.hu_a.shape[axis] - width_irc[axis])

            slice_list.append(slice(start_ndx, end_ndx))

        ct_chunk = self.hu_a[tuple(slice_list)]
        return ct_chunk, center_irc
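
# Two caching layers: getCt keeps a handful of parsed Ct objects in memory
# (loading and masking a series is expensive), while raw_cache memoizes the
# extracted chunks and per-series sample counts on disk via util.disk.getCache,
# so later epochs can skip the full CT load entirely.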
ctCache_depth = 5
@functools.lru_cache(ctCache_depth, typed=True)
def getCt(series_uid):
    return Ct(series_uid)


@raw_cache.memoize(typed=True)
def getCtRawNodule(series_uid, center_xyz, width_irc):
    ct = getCt(series_uid)
    ct_chunk, center_irc = ct.getRawNodule(center_xyz, width_irc)
    return ct_chunk, center_irc


@raw_cache.memoize(typed=True)
def getCtSampleSize(series_uid):
    ct = Ct(series_uid, buildMasks_bool=False)
    return len(ct.benign_indexes)
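
# getCtAugmentedNodule performs the data augmentation in voxel space: it builds
# a 4x4 affine matrix from the requested flip/offset/scale/rotate settings,
# resamples the cached chunk through affine_grid/grid_sample, and optionally
# adds Gaussian noise.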
def getCtAugmentedNodule(
        augmentation_dict,
        series_uid, center_xyz, width_irc,
        use_cache=True):
    if use_cache:
        ct_chunk, center_irc = getCtRawNodule(series_uid, center_xyz, width_irc)
    else:
        ct = getCt(series_uid)
        ct_chunk, center_irc = ct.getRawNodule(center_xyz, width_irc)

    ct_t = torch.tensor(ct_chunk).unsqueeze(0).unsqueeze(0).to(torch.float32)

    transform_t = torch.eye(4).to(torch.float64)

    for i in range(3):
        if 'flip' in augmentation_dict:
            if random.random() > 0.5:
                transform_t[i,i] *= -1

        if 'offset' in augmentation_dict:
            offset_float = augmentation_dict['offset']
            random_float = (random.random() * 2 - 1)
            transform_t[i,3] = offset_float * random_float

        if 'scale' in augmentation_dict:
            scale_float = augmentation_dict['scale']
            random_float = (random.random() * 2 - 1)
            transform_t[i,i] *= 1.0 + scale_float * random_float

    if 'rotate' in augmentation_dict:
        angle_rad = random.random() * math.pi * 2
        s = math.sin(angle_rad)
        c = math.cos(angle_rad)

        rotation_t = torch.tensor([
            [c, -s, 0, 0],
            [s, c, 0, 0],
            [0, 0, 1, 0],
            [0, 0, 0, 1],
        ], dtype=torch.float64)

        transform_t @= rotation_t

    affine_t = F.affine_grid(
        transform_t[:3].unsqueeze(0).to(torch.float32),
        ct_t.size(),
    )

    augmented_chunk = F.grid_sample(
        ct_t,
        affine_t,
        padding_mode='border',
    ).to('cpu')

    if 'noise' in augmentation_dict:
        noise_t = torch.randn_like(augmented_chunk)
        noise_t *= augmentation_dict['noise']

        augmented_chunk += noise_t

    return augmented_chunk[0], center_irc
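
# The augmentation_dict keys consumed above are 'flip', 'offset', 'scale',
# 'rotate', and 'noise'. A typical dict (the values here are only an
# illustrative guess, not tuned settings) might look like:
#   {'flip': True, 'offset': 0.1, 'scale': 0.2, 'rotate': True, 'noise': 25.0}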
class LunaDataset(Dataset):
    def __init__(self,
                 val_stride=0,
                 isValSet_bool=None,
                 series_uid=None,
                 sortby_str='random',
                 ratio_int=0,
                 augmentation_dict=None,
                 noduleInfo_list=None,
                 ):
        self.ratio_int = ratio_int
        self.augmentation_dict = augmentation_dict

        if noduleInfo_list:
            self.noduleInfo_list = copy.copy(noduleInfo_list)
            self.use_cache = False
        else:
            self.noduleInfo_list = copy.copy(getNoduleInfoList())
            self.use_cache = True

        if series_uid:
            self.series_list = [series_uid]
        else:
            self.series_list = sorted(set(noduleInfo_tup.series_uid for noduleInfo_tup in getNoduleInfoList()))

        if isValSet_bool:
            assert val_stride > 0, val_stride
            self.series_list = self.series_list[::val_stride]
            assert self.series_list
        elif val_stride > 0:
            del self.series_list[::val_stride]
            assert self.series_list

        series_set = set(self.series_list)
        self.noduleInfo_list = [x for x in self.noduleInfo_list if x.series_uid in series_set]

        if sortby_str == 'random':
            random.shuffle(self.noduleInfo_list)
        elif sortby_str == 'series_uid':
            self.noduleInfo_list.sort(key=lambda x: (x[2], x[3]))  # sort by series_uid, then center_xyz
        elif sortby_str == 'malignancy_size':
            pass
        else:
            raise Exception("Unknown sort: " + repr(sortby_str))

        self.benign_list = [nt for nt in self.noduleInfo_list if not nt.isMalignant_bool]
        self.malignant_list = [nt for nt in self.noduleInfo_list if nt.isMalignant_bool]

        log.info("{!r}: {} {} samples, {} ben, {} mal, {} ratio".format(
            self,
            len(self.noduleInfo_list),
            "validation" if isValSet_bool else "training",
            len(self.benign_list),
            len(self.malignant_list),
            '{}:1'.format(self.ratio_int) if self.ratio_int else 'unbalanced',
        ))

    def shuffleSamples(self):
        if self.ratio_int:
            random.shuffle(self.benign_list)
            random.shuffle(self.malignant_list)

    def __len__(self):
        if self.ratio_int:
            # return 20000
            return 200000
        else:
            return len(self.noduleInfo_list)
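
    # When ratio_int is set, indexing alternates between the malignant and
    # benign lists so that every block of (ratio_int + 1) samples contains
    # exactly one malignant nodule; both lists wrap around via modulo, which
    # is why __len__ can report a fixed epoch size.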
    def __getitem__(self, ndx):
        if self.ratio_int:
            malignant_ndx = ndx // (self.ratio_int + 1)

            if ndx % (self.ratio_int + 1):
                benign_ndx = ndx - 1 - malignant_ndx
                nodule_tup = self.benign_list[benign_ndx % len(self.benign_list)]
            else:
                nodule_tup = self.malignant_list[malignant_ndx % len(self.malignant_list)]
        else:
            nodule_tup = self.noduleInfo_list[ndx]

        width_irc = (32, 48, 48)

        if self.augmentation_dict:
            nodule_t, center_irc = getCtAugmentedNodule(
                self.augmentation_dict,
                nodule_tup.series_uid,
                nodule_tup.center_xyz,
                width_irc,
                self.use_cache,
            )
        elif self.use_cache:
            nodule_a, center_irc = getCtRawNodule(
                nodule_tup.series_uid,
                nodule_tup.center_xyz,
                width_irc,
            )
            nodule_t = torch.from_numpy(nodule_a).to(torch.float32)
            nodule_t = nodule_t.unsqueeze(0)
        else:
            ct = getCt(nodule_tup.series_uid)
            nodule_a, center_irc = ct.getRawNodule(
                nodule_tup.center_xyz,
                width_irc,
            )
            nodule_t = torch.from_numpy(nodule_a).to(torch.float32)
            nodule_t = nodule_t.unsqueeze(0)

        malignant_t = torch.tensor([
                not nodule_tup.isMalignant_bool,
                nodule_tup.isMalignant_bool,
            ],
            dtype=torch.long,
        )

        # log.debug([type(center_irc), center_irc])

        return nodule_t, malignant_t, nodule_tup.series_uid, torch.tensor(center_irc)
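
# PrepcacheLunaDataset exists only to warm the disk cache: iterating it once
# forces getCtRawNodule and getCtSampleSize to be computed and memoized for
# every sample, so the real training runs don't pay the CT-parsing cost.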
class PrepcacheLunaDataset(LunaDataset):
    def __getitem__(self, ndx):
        nodule_t, malignant_t, series_uid, center_t = super().__getitem__(ndx)
        getCtSampleSize(series_uid)
        return nodule_t, malignant_t, series_uid, center_t
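
# Luna2dSegmentationDataset serves full 512x512 slices for segmentation: each
# item is a stack of context slices plus a lung-mask channel, together with
# per-pixel nodule, benign, and malignant masks built on the fly.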
class Luna2dSegmentationDataset(Dataset):
    def __init__(self,
                 val_stride=0,
                 isValSet_bool=None,
                 series_uid=None,
                 contextSlices_count=2,
                 augmentation_dict=None,
                 fullCt_bool=False,
                 ):
        self.contextSlices_count = contextSlices_count
        self.augmentation_dict = augmentation_dict

        if series_uid:
            self.series_list = [series_uid]
        else:
            self.series_list = sorted(set(noduleInfo_tup.series_uid for noduleInfo_tup in getNoduleInfoList()))

        if isValSet_bool:
            assert val_stride > 0, val_stride
            self.series_list = self.series_list[::val_stride]
            assert self.series_list
        elif val_stride > 0:
            del self.series_list[::val_stride]
            assert self.series_list

        self.sample_list = []
        for series_uid in self.series_list:
            if fullCt_bool:
                self.sample_list.extend([(series_uid, ct_ndx) for ct_ndx in range(getCt(series_uid).hu_a.shape[0])])
            else:
                self.sample_list.extend([(series_uid, ct_ndx) for ct_ndx in range(getCtSampleSize(series_uid))])

        log.info("{!r}: {} {} series, {} slices".format(
            self,
            len(self.series_list),
            {None: 'general', True: 'validation', False: 'training'}[isValSet_bool],
            len(self.sample_list),
        ))

    def __len__(self):
        return len(self.sample_list)  # // 100
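
    # An int index is looked up in sample_list; a (series_uid, ct_ndx,
    # useAugmentation_bool) tuple lets the training subclass pick slices
    # directly. The returned ct_t stacks contextSlices_count slices on either
    # side of the target slice (clamped at the volume edges), plus the 2D lung
    # mask as a final channel.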
    def __getitem__(self, ndx):
        if isinstance(ndx, int):
            series_uid, ct_ndx = self.sample_list[ndx % len(self.sample_list)]
            ct = getCt(series_uid)
            useAugmentation_bool = False
        else:
            series_uid, ct_ndx, useAugmentation_bool = ndx
            ct = getCt(series_uid)

        ct_t = torch.zeros((self.contextSlices_count * 2 + 1 + 1, 512, 512))

        start_ndx = ct_ndx - self.contextSlices_count
        end_ndx = ct_ndx + self.contextSlices_count + 1
        for i, context_ndx in enumerate(range(start_ndx, end_ndx)):
            context_ndx = max(context_ndx, 0)
            context_ndx = min(context_ndx, ct.hu_a.shape[0] - 1)
            ct_t[i] = torch.from_numpy(ct.hu_a[context_ndx].astype(np.float32))
        ct_t /= 1000

        mask_tup = ct.build2dLungMask(ct_ndx)

        ct_t[-1] = torch.from_numpy(mask_tup.lung_mask.astype(np.float32))

        nodule_t = torch.from_numpy(
            (mask_tup.mal_mask | mask_tup.ben_mask).astype(np.float32)
        ).unsqueeze(0)
        ben_t = torch.from_numpy(mask_tup.ben_mask.astype(np.float32)).unsqueeze(0)
        mal_t = torch.from_numpy(mask_tup.mal_mask.astype(np.float32)).unsqueeze(0)

        label_int = mal_t.max() + ben_t.max() * 2

        if self.augmentation_dict and useAugmentation_bool:
            if 'rotate' in self.augmentation_dict:
                if random.random() > 0.5:
                    ct_t = ct_t.rot90(1, [1, 2])
                    nodule_t = nodule_t.rot90(1, [1, 2])

            if 'flip' in self.augmentation_dict:
                dims = [d+1 for d in range(2) if random.random() > 0.5]
                if dims:
                    ct_t = ct_t.flip(dims)
                    nodule_t = nodule_t.flip(dims)

            if 'noise' in self.augmentation_dict:
                noise_t = torch.randn_like(ct_t)
                noise_t *= self.augmentation_dict['noise']
                ct_t += noise_t

        return ct_t, nodule_t, label_int, ben_t, mal_t, ct.series_uid, ct_ndx
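
# The training variant trades fidelity for throughput: it rotates series_list
# once per batch and indexes only the first ctCache_depth entries so the
# lru_cache of Ct objects keeps hitting, and it oversamples slices containing
# malignant or benign nodules (two out of every three samples), with
# augmentation always enabled for integer indices.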
class TrainingLuna2dSegmentationDataset(Luna2dSegmentationDataset):
    def __init__(self, *args, batch_size=80, **kwargs):
        self.needsShuffle_bool = True
        self.batch_size = batch_size
        # self.rotate_frac = 0.5 * len(self.series_list) / len(self)

        super().__init__(*args, **kwargs)

    def __len__(self):
        return 50000

    def __getitem__(self, ndx):
        if self.needsShuffle_bool:
            random.shuffle(self.series_list)
            self.needsShuffle_bool = False

        if isinstance(ndx, int):
            if ndx % self.batch_size == 0:
                self.series_list.append(self.series_list.pop(0))

            series_uid = self.series_list[ndx % ctCache_depth]
            ct = getCt(series_uid)

            if ndx % 3 == 0:
                ct_ndx = random.choice(ct.malignant_indexes or ct.benign_indexes)
            elif ndx % 3 == 1:
                ct_ndx = random.choice(ct.benign_indexes)
            elif ndx % 3 == 2:
                ct_ndx = random.choice(list(range(ct.hu_a.shape[0])))

            useAugmentation_bool = True
        else:
            series_uid, ct_ndx, useAugmentation_bool = ndx

        return super().__getitem__((series_uid, ct_ndx, useAugmentation_bool))
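

# Minimal smoke test, not part of the original training scripts. It assumes at
# least one LUNA subset has been unpacked under data-unversioned/part2/luna/
# and that annotations.csv / candidates.csv live in data/part2/luna/.
if __name__ == '__main__':
    ds = LunaDataset(val_stride=10, isValSet_bool=False)
    nodule_t, malignant_t, series_uid, _center_t = ds[0]
    log.info("sample chunk {} label {} from series {}".format(
        tuple(nodule_t.shape), malignant_t.tolist(), series_uid))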