Add Kitti depth dataset

Warning: using this dataset requires more than 175 GB of disk space (TensorFlow Datasets will also generate prepared examples that consume additional space)
This commit is contained in:
Michael Pivato
2021-04-22 12:13:48 +00:00
parent 02d8cd5810
commit 070aec6eed
3 changed files with 198 additions and 9 deletions

View File

@@ -87,13 +87,13 @@ def dense_nnconv5(size, weights=None, shape=(224, 224, 3), half_features=True):
return keras.Model(inputs=input, outputs=decoder, name="fast_dense_depth")
def load_nyu():
def load_nyu(download_dir='../nyu'):
"""
Load the nyu_v2 dataset train split. Will be downloaded to download_dir (default '../nyu').
:param download_dir: directory to download and prepare the dataset in
:return: shuffled, batched nyu_v2 train dataset
"""
builder = tfds.builder('nyu_depth_v2')
builder.download_and_prepare(download_dir='../nyu')
builder.download_and_prepare(download_dir=download_dir)
return builder \
.as_dataset(split='train', shuffle_files=True) \
.shuffle(buffer_size=1024) \
@@ -101,13 +101,13 @@ def load_nyu():
.map(lambda x: fd.crop_and_resize(x))
def load_nyu_evaluate():
def load_nyu_evaluate(download_dir='../nyu'):
"""
Load the nyu_v2 dataset validation split. Will be downloaded to download_dir (default '../nyu').
:param download_dir: directory to download and prepare the dataset in
:return: batched nyu_v2 validation dataset
"""
builder = tfds.builder('nyu_depth_v2')
builder.download_and_prepare(download_dir='../nyu')
builder.download_and_prepare(download_dir=download_dir)
return builder.as_dataset(split='validation').batch(1).map(lambda x: fd.crop_and_resize(x))