
Commit

Merge branch 'master' into update_mr_model
wasserth committed Dec 17, 2024
2 parents 52478bf + 777dbcc commit 9eda247
Showing 7 changed files with 24 additions and 19 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/run_tests_os.yml
@@ -8,8 +8,8 @@ jobs:
run-tests:
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest] # fails on windows until https://github.com/MIC-DKFZ/nnUNet/issues/2396 is resolved
# os: [ubuntu-latest, macos-latest]
# os: [ubuntu-latest, windows-latest, macos-latest] # fails on windows until https://github.com/MIC-DKFZ/nnUNet/issues/2396 is resolved
os: [ubuntu-latest, macos-latest]
python-version: ["3.10"]
runs-on: ${{ matrix.os }}

2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,8 @@
* add `totalseg_get_modality`
* change pi_time threshold for arterial late phase from 50s to 60s
* add oculomotor muscles model
* removed `rt_utils` and `p_tqdm` dependency


## Release 2.4.0
* add brain structures
2 changes: 1 addition & 1 deletion resources/train_nnunet.md
@@ -6,7 +6,7 @@
4. Preprocess `nnUNetv2_plan_and_preprocess -d <your_dataset_id> -pl ExperimentPlanner -c 3d_fullres -np 2`
5. Train `nnUNetv2_train <your_dataset_id> 3d_fullres 0 -tr nnUNetTrainerNoMirroring` (takes several days)
6. Predict test set `nnUNetv2_predict -i path/to/imagesTs -o path/to/labelsTs_predicted -d <your_dataset_id> -c 3d_fullres -tr nnUNetTrainerNoMirroring --disable_tta -f 0`
7. Evaluate `python resources/evaluate.py path/to/labelsTs path/to/labelsTs_predicted` (requires `pip install git+https://github.com/google-deepmind/surface-distance.git`). The resulting numbers should be similar to the ones in `resources/evaluate_results.txt` (since training is not deterministic the mean dice score across all classes can vary by up to one dice point)
7. Evaluate `python resources/evaluate.py path/to/labelsTs path/to/labelsTs_predicted` (requires `pip install git+https://github.com/google-deepmind/surface-distance.git` and `pip install p_tqdm`). The resulting numbers should be similar to the ones in `resources/evaluate_results.txt` (since training is not deterministic the mean dice score across all classes can vary by up to one dice point)
8. Done

> Note: This will not give you the same results as TotalSegmentator for two reasons:
2 changes: 0 additions & 2 deletions setup.py
@@ -22,12 +22,10 @@
'SimpleITK',
'nibabel>=2.3.0',
'tqdm>=4.45.0',
'p_tqdm',
'xvfbwrapper',
'nnunetv2>=2.2.1',
'requests==2.27.1;python_version<"3.10"',
'requests;python_version>="3.10"',
'rt_utils',
'dicom2nifti',
'pyarrow'
],
23 changes: 12 additions & 11 deletions totalsegmentator/nnunet.py
@@ -18,7 +18,7 @@
import numpy as np
import nibabel as nib
from nibabel.nifti1 import Nifti1Image
from p_tqdm import p_map
# from p_tqdm import p_map
import torch

from totalsegmentator.libs import nostdout
@@ -666,21 +666,22 @@ def nnUNet_predict_image(file_in: Union[str, Path, Nifti1Image], file_out, task_
if nora_tag != "None":
subprocess.call(f"/opt/nora/src/node/nora -p {nora_tag} --add {output_path} --addtag mask", shell=True)
else:
nib.save(img_pred, tmp_dir / "s01.nii.gz") # needed inside of threads

# Code for multithreaded execution
# Speed with different number of threads:
# 1: 46s, 2: 24s, 6: 11s, 10: 8s, 14: 8s
nib.save(img_pred, tmp_dir / "s01.nii.gz")
_ = p_map(partial(save_segmentation_nifti, tmp_dir=tmp_dir, file_out=file_out, nora_tag=nora_tag, header=new_header, task_name=task_name, quiet=quiet),
selected_classes.items(), num_cpus=nr_threads_saving, disable=quiet)
# _ = p_map(partial(save_segmentation_nifti, tmp_dir=tmp_dir, file_out=file_out, nora_tag=nora_tag, header=new_header, task_name=task_name, quiet=quiet),
# selected_classes.items(), num_cpus=nr_threads_saving, disable=quiet)

# Multihreaded saving with same functions as in nnUNet -> same speed as p_map
# pool = Pool(nr_threads_saving)
# results = []
# for k, v in selected_classes.items():
# results.append(pool.starmap_async(save_segmentation_nifti, ((k, v, tmp_dir, file_out, nora_tag),) ))
# _ = [i.get() for i in results] # this actually starts the execution of the async functions
# pool.close()
# pool.join()
pool = Pool(nr_threads_saving)
results = []
for k, v in selected_classes.items():
results.append(pool.starmap_async(save_segmentation_nifti, [((k, v), tmp_dir, file_out, nora_tag, new_header, task_name, quiet)]))
_ = [i.get() for i in results] # this actually starts the execution of the async functions
pool.close()
pool.join()
if not quiet: print(f" Saved in {time.time() - st:.2f}s")

# Postprocessing single files
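The nnunet.py hunk above swaps `p_tqdm`'s `p_map` for the standard-library `multiprocessing.Pool` code that was previously commented out, so the dependency can be dropped. A minimal, self-contained sketch of that pattern; `save_item` and the example data below are hypothetical stand-ins, not the repo's `save_segmentation_nifti`:

```python
from multiprocessing import Pool

def save_item(class_map_item, out_dir):
    # Hypothetical per-class worker, e.g. writing one segmentation NIfTI to disk.
    label_id, label_name = class_map_item
    return f"{out_dir}/{label_name}.nii.gz"

if __name__ == "__main__":
    selected_classes = {1: "liver", 2: "spleen"}
    nr_threads_saving = 2

    # Same effect as p_map(partial(save_item, out_dir="out"), selected_classes.items(), ...):
    # submit one async task per class, then block on every result before closing the pool.
    pool = Pool(nr_threads_saving)
    results = [pool.starmap_async(save_item, [(item, "out")])
               for item in selected_classes.items()]
    _ = [r.get() for r in results]  # .get() waits for each task to finish
    pool.close()
    pool.join()
```

Calling `pool.close()` before `pool.join()` mirrors the block the commit uncomments, so workers exit once the queued tasks are done.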
6 changes: 6 additions & 0 deletions totalsegmentator/python_api.py
@@ -98,6 +98,12 @@ def totalsegmentator(input: Union[str, Path, Nifti1Image], output: Union[str, Pa
validate_device_type_api(device)
device = convert_device_to_cuda(device)

if output_type == "dicom":
try:
from rt_utils import RTStructBuilder
except ImportError:
raise ImportError("rt_utils is required for output_type='dicom'. Please install it with 'pip install rt_utils'.")

# available devices: gpu | cpu | mps | gpu:1, gpu:2, etc.
if device == "gpu":
device = "cuda"
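The python_api.py hunk makes `rt_utils` an optional dependency: it is imported only when `output_type == "dicom"`, and a clear install hint is raised if it is missing. A small generic sketch of the same guard using only the standard library; the `require` helper is illustrative, not part of TotalSegmentator:

```python
import importlib

def require(package_name: str, feature: str):
    """Import an optional dependency lazily, failing with an actionable message."""
    try:
        return importlib.import_module(package_name)
    except ImportError as e:
        raise ImportError(
            f"{package_name} is required for {feature}. "
            f"Install it with 'pip install {package_name}'."
        ) from e

# Usage sketch: only pay the import (and install) cost when DICOM output is requested.
# rt_utils = require("rt_utils", "output_type='dicom'")
```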
4 changes: 1 addition & 3 deletions totalsegmentator/statistics.py
@@ -10,7 +10,6 @@
import nibabel as nib
from nibabel.nifti1 import Nifti1Image
from tqdm import tqdm
from p_tqdm import p_map
import numpy.ma as ma

from totalsegmentator.map_to_binary import class_map
@@ -58,8 +57,7 @@ def get_radiomics_features(seg_file, img_file="ct.nii.gz"):

def get_radiomics_features_for_entire_dir(ct_file:Path, mask_dir:Path, file_out:Path):
masks = sorted(list(mask_dir.glob("*.nii.gz")))
stats = p_map(partial(get_radiomics_features, img_file=ct_file),
masks, num_cpus=1, disable=False)
stats = [get_radiomics_features(mask, img_file=ct_file) for mask in masks]
stats = {mask_name: stats for mask_name, stats in stats}
with open(file_out, "w") as f:
json.dump(stats, f, indent=4)
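With `p_map` removed, the directory-level radiomics loop in statistics.py now runs sequentially (the old call used `num_cpus=1` anyway, so throughput should be unchanged). If parallelism were ever wanted again without reintroducing `p_tqdm`, the standard library covers it; a hedged sketch in which `compute_features` is a stand-in for the repo's `get_radiomics_features` and keeps its (seg_file, img_file) argument order:

```python
from concurrent.futures import ProcessPoolExecutor
from functools import partial

def compute_features(seg_file, img_file="ct.nii.gz"):
    # Stand-in for per-mask feature extraction; returns (mask_name, feature_dict).
    return seg_file, {"volume": 0.0}

def features_for_dir(img_file, mask_files, max_workers=1):
    work = partial(compute_features, img_file=img_file)
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        # executor.map preserves input order, like p_map did.
        results = list(executor.map(work, mask_files))
    return {name: feats for name, feats in results}

if __name__ == "__main__":
    print(features_for_dir("ct.nii.gz", ["liver.nii.gz", "spleen.nii.gz"]))
```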
