Commit 38482051 by Eli Gibson

Merge branch '217-adding-classification-entry-point-squashed' into 'dev'

Resolve "adding classification entry point `net_classify`"

Closes #217

See merge request CMIC/NiftyNet!203
parents e429b23e 6dd95b33
Pipeline #10704 passed with stages
in 51 minutes 25 seconds
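In short, this merge request wires up `net_classify` alongside the existing `net_segment` and `net_regress` entry points. From a source checkout it is driven as in the CI changes below (the commands and config path are the ones the job itself runs, with the config coming from the testing archives it unpacks):

    python net_classify.py train -c testing_data/test_classification.ini
    python net_classify.py inference -c testing_data/test_classification.ini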
@@ -49,9 +49,13 @@ testjob:
- tar -xzvf example_volumes_v0_2.tar.gz
# - wget -q https://www.dropbox.com/s/94wa4fl8f8k3aie/testing_data.tar.gz
# - tar -xzvf testing_data.tar.gz
- wget -q https://www.dropbox.com/s/p7b3t2c3mewtree/testing_data_v0_2.tar.gz
- tar -xzvf testing_data_v0_2.tar.gz
# - wget -q https://www.dropbox.com/s/p7b3t2c3mewtree/testing_data_v0_2.tar.gz
# - tar -xzvf testing_data_v0_2.tar.gz
- wget -q https://www.dropbox.com/s/2g8jr3wq8rw5xtv/testing_code_v0_3.tar.gz
- wget -q https://www.dropbox.com/s/5p5fdgy053tgmdj/testing_data_v0_3.tar.gz
- mkdir -p testing_data
- tar -xzvf testing_data_v0_3.tar.gz -C testing_data
- tar -xzvf testing_code_v0_3.tar.gz -C testing_data
#### python 3 tests ###################################
# save NiftyNet folder path just in case
- export niftynet_dir=$(pwd)
@@ -71,6 +75,8 @@ testjob:
- pip install -r requirements-gpu.txt
# tests
- python net_download.py testing -r
- python net_segment.py train -c config/highres3dnet_config.ini --batch_size=1 --num_threads=2 --queue_length=40 --max_iter=10
- python net_segment.py inference -c config/highres3dnet_config.ini --batch_size 8 --spatial_window_size 64,64,64 --queue_length 64
@@ -95,6 +101,9 @@ testjob:
- python net_segment.py train -c config/default_multimodal_segmentation.ini --batch_size 3
- python net_segment.py inference -c config/default_multimodal_segmentation.ini --spatial_window_size 64,64 --batch_size 7
- python net_classify.py train -c testing_data/test_classification.ini
- python net_classify.py inference -c testing_data/test_classification.ini
- python net_regress.py train -c config/default_monomodal_regression.ini --batch_size=1 --name toynet --max_iter 10
- python net_regress.py inference -c config/default_monomodal_regression.ini --batch_size=7 --name toynet --spatial_window_size 84,84,84
@@ -114,6 +123,7 @@ testjob:
###############end of python3
######### Python 2 ###################### run python2 code with coverage wrapper
- python net_download.py testing -r
- coverage run -a --source . net_segment.py train -c config/highres3dnet_config.ini --batch_size=1 --num_threads=2 --queue_length=40 --max_iter=10
- coverage run -a --source . net_segment.py inference -c config/highres3dnet_config.ini --batch_size 8 --spatial_window_size 64,64,64 --queue_length 64
@@ -138,6 +148,9 @@ testjob:
- coverage run -a --source . net_segment.py train -c config/default_multimodal_segmentation.ini --batch_size 3
- coverage run -a --source . net_segment.py inference -c config/default_multimodal_segmentation.ini --spatial_window_size 64,64 --batch_size 7
- coverage run -a --source . net_classify.py train -c testing_data/test_classification.ini
- coverage run -a --source . net_classify.py inference -c testing_data/test_classification.ini
- coverage run -a --source . net_regress.py train -c config/default_monomodal_regression.ini --max_iter 10 --name toynet --batch_size=2
- coverage run -a --source . net_run.py train -a net_regress -c config/default_monomodal_regression.ini --max_iter 10 --name toynet --batch_size=2
- coverage run -a --source . net_regress.py inference -c config/default_monomodal_regression.ini --name toynet --spatial_window_size 84,84,84 --batch_size 7
@@ -197,11 +210,15 @@ quicktest:
- tar -xzvf example_volumes_v0_2.tar.gz
#- wget -q https://www.dropbox.com/s/94wa4fl8f8k3aie/testing_data.tar.gz
#- tar -xzvf testing_data.tar.gz
- wget -q https://www.dropbox.com/s/p7b3t2c3mewtree/testing_data_v0_2.tar.gz
- tar -xzvf testing_data_v0_2.tar.gz
# - wget -q https://www.dropbox.com/s/p7b3t2c3mewtree/testing_data_v0_2.tar.gz
# - tar -xzvf testing_data_v0_2.tar.gz
- wget -q https://www.dropbox.com/s/5p5fdgy053tgmdj/testing_data_v0_3.tar.gz
- mkdir -p testing_data
- tar -xzvf testing_data_v0_3.tar.gz -C testing_data
- coverage erase
# run only fast tests
- python net_download.py testing -r
- QUICKTEST=True coverage run -a --source . -m unittest discover -s "tests" -p "*_test.py"
- coverage report -m
@@ -303,11 +320,18 @@ pip-installer:
- cat $package_importer
- python $package_importer
# test niftynet command
- net_download testing -r
- net_segment train -c $niftynet_dir/config/default_segmentation.ini --name toynet --batch_size 3 --max_iter 5
- net_segment inference -c $niftynet_dir/config/default_segmentation.ini --name toynet --spatial_window_size 80,80,80 --batch_size 8
- net_run train --app net_segment -c $niftynet_dir/config/default_segmentation.ini --name toynet --batch_size 3 --max_iter 5
- net_run inference --app net_segment -c $niftynet_dir/config/default_segmentation.ini --name toynet --spatial_window_size 80,80,80 --batch_size 8
- net_classify train -c extensions/testing/test_classification.ini
- net_classify inference -c extensions/testing/test_classification.ini
- net_run --app net_classify train -c extensions/testing/test_classification.ini
- net_run --app net_classify inference -c extensions/testing/test_classification.ini
- net_regress train -c $niftynet_dir/config/default_monomodal_regression.ini --max_iter 10 --name toynet --batch_size=2
- net_regress inference -c $niftynet_dir/config/default_monomodal_regression.ini --name toynet --spatial_window_size 84,84,84 --batch_size 7
- net_run train -a net_regress -c $niftynet_dir/config/default_monomodal_regression.ini --max_iter 10 --name toynet --batch_size=2
@@ -360,12 +384,18 @@ pip-installer:
- python $package_importer
# test niftynet command
- ln -s /home/gitlab-runner/environments/niftynet/data/example_volumes ./example_volumes
- net_download testing -r
- net_segment train -c $niftynet_dir/config/default_segmentation.ini --name toynet --batch_size 3 --max_iter 5
- net_segment inference -c $niftynet_dir/config/default_segmentation.ini --name toynet --spatial_window_size 80,80,80 --batch_size 8
- net_run train --app net_segment -c $niftynet_dir/config/default_segmentation.ini --name toynet --batch_size 3 --max_iter 5
- net_run inference --app net_segment -c $niftynet_dir/config/default_segmentation.ini --name toynet --spatial_window_size 80,80,80 --batch_size 8
- net_classify train -c extensions/testing/test_classification.ini
- net_classify inference -c extensions/testing/test_classification.ini
- net_run --app net_classify train -c extensions/testing/test_classification.ini
- net_run --app net_classify inference -c extensions/testing/test_classification.ini
- net_regress train -c $niftynet_dir/config/default_monomodal_regression.ini --max_iter 10 --name toynet --batch_size=2
- net_regress inference -c $niftynet_dir/config/default_monomodal_regression.ini --name toynet --spatial_window_size 84,84,84 --batch_size 7
- net_run train -a net_regress -c $niftynet_dir/config/default_monomodal_regression.ini --max_iter 10 --name toynet --batch_size=2
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from niftynet import main
if __name__ == "__main__":
    sys.exit(main())
@@ -13,9 +13,11 @@ from abc import ABCMeta, abstractmethod
import nibabel as nib
import numpy as np
import tensorflow as tf
from six import with_metaclass
from six import with_metaclass, string_types
import niftynet.io.misc_io as misc
from niftynet.io.misc_io import resolve_file_name
from niftynet.utilities.niftynet_global_config import NiftyNetGlobalConfig
class Loadable(with_metaclass(ABCMeta, object)):
@@ -82,18 +84,13 @@ class DataFromFile(Loadable):
@file_path.setter
def file_path(self, path_array):
if isinstance(path_array, string_types):
path_array = (path_array,)
home_folder = NiftyNetGlobalConfig().get_niftynet_home_folder()
try:
if os.path.isfile(path_array):
self._file_path = (os.path.abspath(path_array),)
return
except (TypeError, AttributeError):
pass
try:
assert all([os.path.isfile(file_name) for file_name in path_array])
self._file_path = \
tuple(os.path.abspath(file_name) for file_name in path_array)
return
except (TypeError, AssertionError, AttributeError):
self._file_path = tuple(resolve_file_name(path, ('.', home_folder))
for path in path_array)
except (TypeError, AssertionError, AttributeError, IOError):
tf.logging.fatal(
"unrecognised file path format, should be a valid filename,"
"or a sequence of filenames %s", path_array)
@@ -630,7 +627,9 @@ class ImageFactory(object):
pass
if image_type is None:
try:
assert all([os.path.isfile(path) for path in file_path])
home_folder = NiftyNetGlobalConfig().get_niftynet_home_folder()
file_path = [resolve_file_name(path, ('.', home_folder))
for path in file_path]
ndims = misc.infer_ndims_from_file(file_path[0])
ndims = ndims + (1 if len(file_path) > 1 else 0)
image_type = cls.INSTANCE_DICT.get(ndims, None)
@@ -438,6 +438,15 @@ def to_absolute_path(input_path, model_root):
pass
return os.path.abspath(os.path.join(model_root, input_path))
def resolve_file_name(file_name, paths):
    """Return the absolute path of ``file_name``, looked up as given or
    inside each folder in ``paths``; raise ``IOError`` when nothing matches."""
    if os.path.isfile(file_name):
        return os.path.abspath(file_name)
    for path in paths:
        candidate = os.path.join(path, file_name)
        if os.path.isfile(candidate):
            tf.logging.info('Resolving %s as %s', file_name, candidate)
            return os.path.abspath(candidate)
    tf.logging.info('Could not resolve %s', file_name)
    raise IOError('could not resolve file name: {}'.format(file_name))
def resolve_checkpoint(checkpoint_name):
# For now only supports checkpoint_name where
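A short usage sketch of the new resolver; the bare config name is illustrative, and the call mirrors how the parser change further down uses it:

    from niftynet.io.misc_io import resolve_file_name
    from niftynet.utilities.niftynet_global_config import NiftyNetGlobalConfig

    home_folder = NiftyNetGlobalConfig().get_niftynet_home_folder()
    # checked against the current directory first, then the NiftyNet home folder;
    # raises IOError if the file is found in neither location
    config_path = resolve_file_name('test_classification.ini', ('.', home_folder))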
@@ -24,6 +24,7 @@ from niftynet.utilities.user_parameters_helper import standardise_section_name
from niftynet.utilities.util_common import \
damerau_levenshtein_distance as edit_distance
from niftynet.utilities.versioning import get_niftynet_version_string
from niftynet.io.misc_io import resolve_file_name
try:
import configparser
@@ -81,6 +82,8 @@ def run():
# Resolve relative configuration file location
config_path = os.path.expanduser(meta_args.conf)
home_folder = NiftyNetGlobalConfig().get_niftynet_home_folder()
config_path = resolve_file_name(config_path, ('.', home_folder))
if not os.path.isfile(config_path):
relative_conf_file = os.path.join(
NiftyNetGlobalConfig().get_default_examples_folder(),
@@ -165,14 +168,16 @@ def run():
continue
input_data_args[section] = all_args[section]
# set the output path of csv list if not exists
csv_path = input_data_args[section].csv_file
if os.path.isfile(csv_path):
try:
csv_path = resolve_file_name(input_data_args[section].csv_file,
('.', home_folder))
input_data_args[section].csv_file = csv_path
# don't search files if csv specified in config
try:
delattr(input_data_args[section], 'path_to_search')
except AttributeError:
pass
else:
except IOError:
input_data_args[section].csv_file = ''
# preserve ``config_file`` and ``action parameter`` from the meta_args
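Read as a whole, the new csv handling inside run() becomes roughly the following; again a sketch assembled from the added lines in the hunk above, with the surrounding loop and section variables as in the existing code:

    try:
        csv_path = resolve_file_name(input_data_args[section].csv_file,
                                     ('.', home_folder))
        input_data_args[section].csv_file = csv_path
        # a csv was specified in the config, so don't search for input files
        try:
            delattr(input_data_args[section], 'path_to_search')
        except AttributeError:
            pass
    except IOError:
        # csv could not be resolved; fall back to searching path_to_search
        input_data_args[section].csv_file = ''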
@@ -110,6 +110,7 @@ setup(
'net_regress=niftynet:main',
'net_gan=niftynet:main',
'net_autoencoder=niftynet:main',
'net_classify=niftynet:main',
],
},
)
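After a pip install, the same application is also reachable as a console script (and via `net_run --app net_classify`); the pip-installer stages above exercise it as:

    net_classify train -c extensions/testing/test_classification.ini
    net_classify inference -c extensions/testing/test_classification.ini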