ENH: Drop FSL BET to estimate the "outskin" (head) mask by oesteban · Pull Request #1105 · nipreps/mriqc

ENH: Drop FSL BET to estimate the "outskin" (head) mask #1105


Merged (4 commits, Mar 31, 2023)
10 changes: 5 additions & 5 deletions .github/workflows/pythonpackage.yml
@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ['3.8', '3.9', '3.11']
+ python-version: ['3.8', '3.9'] # , '3.11']
pip: ["pip==21.2", "pip~=23.0"]

steps:
@@ -38,7 +38,7 @@ jobs:
run: |
python -m venv /tmp/buildenv
source /tmp/buildenv/bin/activate
- python -m pip install -U build hatch hatchling pip twine docutils
+ python -m pip install -U build hatch hatchling pip twine docutils wheel

python -m build -s -w
python -m twine check dist/mriqc-*
@@ -56,7 +56,7 @@ jobs:
run: |
python -m venv /tmp/pip
source /tmp/pip/bin/activate
- python -m pip install -U hatch hatchling "${{ matrix.pip }}"
+ python -m pip install -U hatch hatchling "${{ matrix.pip }}" wheel "cython==3.0.0b2" "numpy ~=1.20" scipy
python -m pip install .
INSTALLED_VERSION=$(python -c 'import mriqc as qc; print(qc.__version__, end="")')
echo "VERSION: \"${THISVERSION}\""
@@ -66,7 +66,7 @@ jobs:
run: |
python -m venv /tmp/install_sdist
source /tmp/install_sdist/bin/activate
- python -m pip install -U hatch hatchling "${{ matrix.pip }}"
+ python -m pip install -U hatch hatchling "${{ matrix.pip }}" wheel "cython==3.0.0b2" "numpy ~=1.20" scipy
python -m pip install /tmp/package/mriqc*.tar.gz
INSTALLED_VERSION=$(python -c 'import mriqc as qc; print(qc.__version__, end="")')
echo "VERSION: \"${THISVERSION}\""
@@ -76,7 +76,7 @@ jobs:
run: |
python -m venv /tmp/install_wheel
source /tmp/install_wheel/bin/activate
python -m pip install -U "setuptools >= 45" "${{ matrix.pip }}"
python -m pip install -U hatch hatchling "${{ matrix.pip }}" wheel "cython==3.0.0b2" "numpy ~=1.20" scipy
python -m pip install /tmp/package/mriqc*.whl
INSTALLED_VERSION=$(python -c 'import mriqc as qc; print(qc.__version__, end="")')
echo "INSTALLED: \"${INSTALLED_VERSION}\""
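The run blocks above pre-install build requirements (wheel, Cython, NumPy, SciPy), presumably so that the newly pinned DIPY dependency can be built from source where no wheel is published, and then verify that the installed package reports the expected version. A minimal Python sketch of that final check, assuming THISVERSION was exported by an earlier step (the fallback value is purely illustrative):

# Sketch of the version-consistency check performed after each install flavor
# (editable, sdist, wheel); the fallback value stands in for ${THISVERSION}.
import os

import mriqc

built_version = os.environ.get("THISVERSION", "23.1.0")  # illustrative fallback only

print(f'VERSION: "{built_version}"')
print(f'INSTALLED: "{mriqc.__version__}"')

# The job fails when the installed package does not report the built version.
assert mriqc.__version__ == built_version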
1 change: 1 addition & 0 deletions docs/source/conf.py
@@ -43,6 +43,7 @@

# Mock modules in autodoc:
autodoc_mock_imports = [
"dipy",
"matplotlib",
"nilearn",
"numpy",
8 changes: 4 additions & 4 deletions mriqc/data/testdata/group_T1w.tsv
@@ -1,5 +1,5 @@
bids_name cjv cnr efc fber fwhm_avg fwhm_x fwhm_y fwhm_z icvs_csf icvs_gm icvs_wm inu_med inu_range qi_1 qi_2 rpve_csf rpve_gm rpve_wm size_x size_y size_z snr_csf snr_gm snr_total snr_wm snrd_csf snrd_gm snrd_total snrd_wm spacing_x spacing_y spacing_z summary_bg_k summary_bg_mad summary_bg_mean summary_bg_median summary_bg_n summary_bg_p05 summary_bg_p95 summary_bg_stdv summary_csf_k summary_csf_mad summary_csf_mean summary_csf_median summary_csf_n summary_csf_p05 summary_csf_p95 summary_csf_stdv summary_gm_k summary_gm_mad summary_gm_mean summary_gm_median summary_gm_n summary_gm_p05 summary_gm_p95 summary_gm_stdv summary_wm_k summary_wm_mad summary_wm_mean summary_wm_median summary_wm_n summary_wm_p05 summary_wm_p95 summary_wm_stdv tpm_overlap_csf tpm_overlap_gm tpm_overlap_wm wm2max
sub-50137_T1w 0.37313632227722987 3.5844663252515097 0.6960134252231683 7605.10513033907 4.202703333333333 4.04437 4.57298 3.99076 0.18062183162155526 0.45129552684595725 0.3680826415324875 0.6880311369895935 0.3810786575078964 0.0 0.0016246831072444018 18.825247702549973 8.39017106314948 11.649061977664282 208 256 176 1.6993239139994756 9.60548868379397 9.580703779775568 17.43729874153326 5.973337476873543 18.14962818878457 17.123197682080704 27.246627380583995 1.0 1.0 1.0 367.95295392615037 0.0 7.789915131389547 0.0 2969163.0 0.0 48.733814522624016 24.04565664241587 19.004358184769764 99.59667629101651 240.17309068576688 219.24109540879726 23305.0 76.80156033039093 460.92335920929895 129.01389182901477 0.027295873315861918 68.99333393977689 666.3344565866706 666.1509383618832 48318.0 551.0137391388416 780.3517317920923 69.35035445376488 0.37152398970452927 55.593053856985584 1001.987130865773 1000.0406734496355 249478.0 911.7335358560085 1099.2182608991861 57.350549761438494 0.1707051203342759 0.4671890606536107 0.5031855306067675 0.41705010441957996
sub-50152_T1w 0.31516709879412075 4.172440280943563 0.6407575000929343 3312.974194636532 3.538223004967018 3.5068090149010525 3.72369 3.38417 0.14842499625730998 0.5250892530167288 0.3264857507259612 0.6055912971496582 0.2994886189699173 0.0 0.00504991530578035 24.0285265653838 7.9353643561617515 13.148145318541953 160 239 200 2.510834587303088 11.943084741826837 11.615337183684636 20.39209222192398 11.494317323697395 23.896009896419393 23.551368852279484 35.26377933672167 1.100000023841858 1.0 1.0 630.6396765272589 0.0 9.778584125865612 0.0 2189774.0 0.0 45.05020335316658 18.57887643142015 0.7961230412523719 114.96067825895601 334.6061178320976 325.96494595706463 13917.0 121.97151667177677 557.9582219704985 129.8186811509053 0.08243483568350429 55.301123908911876 678.5507979221064 677.661957219243 56655.0 585.84353428334 774.0235786288977 56.740447818984975 0.5065511281865795 46.301248062937475 1000.1093290204658 1000.0381581634283 147620.0 920.0549581423402 1080.6341173063959 49.040322104779726 0.14675306814451586 0.4834100383925821 0.47801106843313007 0.49068974560713347
sub-50785_T1w 0.3739190780358043 3.460710620103788 0.7530480076783987 -1.0 3.499472057939973 3.46213 3.7065461738199192 3.32974 0.189876038761227 0.44213808013778255 0.36798588110099045 0.9646123945713043 0.2733038604259488 0.0 0.0018900993982801935 26.060490432885494 11.501433085474199 15.490402541642467 256 180 256 1.6733744484964148 7.939790311775997 6.544267894218604 10.019638922383399 8.769148428721659 26.034762965244912 26.791448202358822 45.5704332131099 1.0 0.9999875426292419 1.0 226.8740565908444 0.0 1.9238560057587766 0.0 5103243.0 0.0 0.0 14.376347926781678 0.9433749341342765 118.2763764450988 205.39404429515787 192.4306584596634 20108.0 39.272444665431976 405.72885352373123 114.99271645101418 0.1807781348962867 69.97938871717868 571.6115923516921 571.3082200586796 21002.0 452.9962001532316 692.0828685075045 71.95336352499645 1.2097531340465286 90.31664813841826 1009.8780921704935 1000.0000046491623 178124.0 863.9937826395035 1189.4063824415207 99.80371601794556 0.1680471213771674 0.4634107232994584 0.4858448244116643 0.38380923954498725
sub-51187_T1w 0.33882862703385835 3.9445394143408588 0.6068733970705135 4679.158051897638 3.5394978356516567 4.25443345257453 2.965211919410108 3.398848134970332 0.16404671986716 0.4935345249617393 0.3424187551711007 0.8655245900154114 0.6595467209815977 0.0 6.414677477794047e-05 26.046560208106577 9.5216518888818 14.574225774565697 256 132 256 0.5218688172129379 7.963006853091069 7.5318240199687905 14.110596389602364 6.3902127171011305 14.38001570924317 15.128622286499123 24.61563843315307 0.8593999743461609 1.5000007152557373 0.8593999743461609 20.953484796959334 0.0 11.339826321213017 0.0 2337817.0 0.0 64.88082966953516 26.61464121953738 3.8791461166165755 170.97945856656423 434.6798289708983 259.59970897994936 5336.0 45.10847321804613 1694.362357551232 497.3958473249557 0.004220801228355775 73.69218439433283 582.6507497875704 584.1821013651788 47874.0 459.6302606094629 698.9381590856239 73.36123286903359 0.6090564532154255 67.19881568745514 1002.0922130810112 999.9999775439501 138601.0 889.7433086000383 1121.053142476827 70.86846951393275 0.13692615298141483 0.47923963498730343 0.4788423111786441 0.340853963466229
sub-50137_T1w 0.37313632227722987 3.62639210294697 0.6960134252231683 8056.106138453218 4.202703333333333 4.04437 4.57298 3.99076 0.18062183162155526 0.45129552684595725 0.3680826415324875 0.6880311369895935 0.3810786575078964 0.0 0.0024918357654802447 18.825247702549973 8.39017106314948 11.649061977664282 208 256 176 1.6993239139994756 9.60548868379397 9.580703779775568 17.43729874153326 7.380605799348262 22.425528707323352 21.157279762786626 33.66570478168827 1.0 1.0 1.0 146.5989679246792 0.0 5.901038945416854 0.0 2641215.0 0.0 40.38758897781372 19.460844527268932 19.004358184769764 99.59667629101651 240.17309068576688 219.24109540879726 23305.0 76.80156033039093 460.92335920929895 129.01389182901477 0.027295873315861918 68.99333393977689 666.3344565866706 666.1509383618832 48318.0 551.0137391388416 780.3517317920923 69.35035445376488 0.37152398970452927 55.593053856985584 1001.987130865773 1000.0406734496355 249478.0 911.7335358560085 1099.2182608991861 57.350549761438494 0.1707051203342759 0.4671890606536107 0.5031855306067675 0.41705010441957996
sub-50152_T1w 0.31516709879412075 4.232788516339497 0.6407575000929343 7298.3032720545725 3.538223004967018 3.5068090149010525 3.72369 3.38417 0.14842499625730998 0.5250892530167288 0.3264857507259612 0.6055912971496582 0.2994886189699173 0.0 0.006326195864496759 24.0285265653838 7.9353643561617515 13.148145318541953 160 239 200 2.510834587303088 11.943084741826837 11.615337183684636 20.39209222192398 16.08942388387145 33.44896626126941 32.966547367585754 49.361251957616396 1.100000023841858 1.0 1.0 7.265829242979748 0.0 7.053836383681529 0.0 1856628.0 0.0 35.84927199035883 13.272787314316249 0.7961230412523719 114.96067825895601 334.6061178320976 325.96494595706463 13917.0 121.97151667177677 557.9582219704985 129.8186811509053 0.08243483568350429 55.301123908911876 678.5507979221064 677.661957219243 56655.0 585.84353428334 774.0235786288977 56.740447818984975 0.5065511281865795 46.301248062937475 1000.1093290204658 1000.0381581634283 147620.0 920.0549581423402 1080.6341173063959 49.040322104779726 0.14675306814451586 0.4834100383925821 0.47801106843313007 0.49068974560713347
sub-50785_T1w 0.3739190780358043 3.4014596518572464 0.7530480076783987 -1.0 3.499472057939973 3.46213 3.7065461738199192 3.32974 0.189876038761227 0.44213808013778255 0.36798588110099045 0.9646123945713043 0.2733038604259488 0.0 0.00017084144889618273 26.060490432885494 11.501433085474199 15.490402541642467 256 180 256 1.6733744484964148 7.939790311775997 6.544267894218604 10.019638922383399 4.6159237347095345 13.704236081259884 14.102541729076087 23.987465371258843 1.0 0.9999875426292419 1.0 1456.5764331681412 0.0 1.213921365894732 0.0 4851689.0 0.0 0.0 27.311614332992548 0.9433749341342765 118.2763764450988 205.39404429515787 192.4306584596634 20108.0 39.272444665431976 405.72885352373123 114.99271645101418 0.1807781348962867 69.97938871717868 571.6115923516921 571.3082200586796 21002.0 452.9962001532316 692.0828685075045 71.95336352499645 1.2097531340465286 90.31664813841826 1009.8780921704935 1000.0000046491623 178124.0 863.9937826395035 1189.4063824415207 99.80371601794556 0.1680471213771674 0.4634107232994584 0.4858448244116643 0.38380923954498725
sub-51187_T1w 0.33882862703385835 3.9788123328856853 0.6068733970705135 5151.111153829724 3.5394978356516567 4.25443345257453 2.965211919410108 3.398848134970332 0.16404671986716 0.4935345249617393 0.3424187551711007 0.8655245900154114 0.6595467209815977 0.0 0.00010321100584340151 26.046560208106577 9.5216518888818 14.574225774565697 256 132 256 0.5218688172129379 7.963006853091069 7.5318240199687905 14.110596389602364 7.47460977086053 16.820254768325874 17.695897299272193 28.79282735863018 0.8593999743461609 1.5000007152557373 0.8593999743461609 212.55935560332208 0.0 7.965457888387402 0.0 2086165.0 0.0 47.412913989275694 22.753457905614738 3.8791461166165755 170.97945856656423 434.6798289708983 259.59970897994936 5336.0 45.10847321804613 1694.362357551232 497.3958473249557 0.004220801228355775 73.69218439433283 582.6507497875704 584.1821013651788 47874.0 459.6302606094629 698.9381590856239 73.36123286903359 0.6090564532154255 67.19881568745514 1002.0922130810112 999.9999775439501 138601.0 889.7433086000383 1121.053142476827 70.86846951393275 0.13692615298141483 0.47923963498730343 0.4788423111786441 0.340853963466229
146 changes: 59 additions & 87 deletions mriqc/workflows/anatomical/base.py
@@ -486,107 +486,79 @@ def headmsk_wf(name="HeadMaskWorkflow"):

"""

use_bet = config.workflow.headmask.upper() == "BET"
has_dipy = False

if not use_bet:
try:
from dipy.denoise import nlmeans # noqa

has_dipy = True
except ImportError:
pass

if not use_bet and not has_dipy:
raise RuntimeError("DIPY is not installed and ``config.workflow.headmask`` is not BET.")
from nipype.interfaces.dipy import Denoise

workflow = pe.Workflow(name=name)
inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "in_segm"]), name="inputnode")
outputnode = pe.Node(niu.IdentityInterface(fields=["out_file"]), name="outputnode")

if use_bet:
from nipype.interfaces.fsl import BET

# Alternative for when dipy is not installed
bet = pe.Node(BET(surfaces=True), name="bet")

# fmt: off
workflow.connect([
(inputnode, bet, [("in_file", "in_file")]),
(bet, outputnode, [('outskin_mask_file', "out_file")]),
])
# fmt: on

else:
from nipype.interfaces.dipy import Denoise

enhance = pe.Node(
enhance = pe.Node(
niu.Function(
input_names=["in_file"],
output_names=["out_file"],
function=_enhance,
),
name="Enhance",
)
estsnr = pe.Node(
niu.Function(
input_names=["in_file", "seg_file"],
output_names=["out_snr"],
function=_estimate_snr,
),
name="EstimateSNR",
)
denoise = pe.Node(Denoise(), name="Denoise")
gradient = pe.Node(
niu.Function(
input_names=["in_file", "snr", "sigma"],
output_names=["out_file"],
function=image_gradient,
),
name="Grad",
)
thresh = pe.Node(
niu.Function(
input_names=["in_file", "in_segm", "aniso", "thresh"],
output_names=["out_file"],
function=gradient_threshold,
),
name="GradientThreshold",
)
if config.workflow.species != "human":
calc_sigma = pe.Node(
niu.Function(
input_names=["in_file"],
output_names=["out_file"],
function=_enhance,
),
name="Enhance",
)
estsnr = pe.Node(
niu.Function(
input_names=["in_file", "seg_file"],
output_names=["out_snr"],
function=_estimate_snr,
),
name="EstimateSNR",
)
denoise = pe.Node(Denoise(), name="Denoise")
gradient = pe.Node(
niu.Function(
input_names=["in_file", "snr", "sigma"],
output_names=["out_file"],
function=image_gradient,
),
name="Grad",
)
thresh = pe.Node(
niu.Function(
input_names=["in_file", "in_segm", "aniso", "thresh"],
output_names=["out_file"],
function=gradient_threshold,
output_names=["sigma"],
function=sigma_calc,
),
name="GradientThreshold",
name="calc_sigma",
)
if config.workflow.species != "human":
calc_sigma = pe.Node(
niu.Function(
input_names=["in_file"],
output_names=["sigma"],
function=sigma_calc,
),
name="calc_sigma",
)
workflow.connect(
[
(inputnode, calc_sigma, [("in_file", "in_file")]),
(calc_sigma, gradient, [("sigma", "sigma")]),
]
)

thresh.inputs.aniso = True
thresh.inputs.thresh = 4.0

# fmt: off
workflow.connect([
(inputnode, estsnr, [("in_file", "in_file"),
("in_segm", "seg_file")]),
(estsnr, denoise, [("out_snr", "snr")]),
(inputnode, enhance, [("in_file", "in_file")]),
(enhance, denoise, [("out_file", "in_file")]),
(estsnr, gradient, [("out_snr", "snr")]),
(denoise, gradient, [("out_file", "in_file")]),
(inputnode, thresh, [("in_segm", "in_segm")]),
(gradient, thresh, [("out_file", "in_file")]),
(thresh, outputnode, [("out_file", "out_file")]),
(inputnode, calc_sigma, [("in_file", "in_file")]),
(calc_sigma, gradient, [("sigma", "sigma")]),
])
# fmt: on

thresh.inputs.aniso = True
thresh.inputs.thresh = 4.0

# fmt: off
workflow.connect([
(inputnode, estsnr, [("in_file", "in_file"),
("in_segm", "seg_file")]),
(estsnr, denoise, [("out_snr", "snr")]),
(inputnode, enhance, [("in_file", "in_file")]),
(enhance, denoise, [("out_file", "in_file")]),
(estsnr, gradient, [("out_snr", "snr")]),
(denoise, gradient, [("out_file", "in_file")]),
(inputnode, thresh, [("in_segm", "in_segm")]),
(gradient, thresh, [("out_file", "in_file")]),
(thresh, outputnode, [("out_file", "out_file")]),
])
# fmt: on

return workflow


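With the BET branch gone, headmsk_wf always derives the head ("outskin") mask from the gradient magnitude of a denoised, enhanced image rather than from FSL BET's outskin_mask_file. The standalone sketch below illustrates that general idea using only nibabel and SciPy; it is not MRIQC's implementation, and the function name and its sigma/percentile parameters are invented for this example.

# Illustrative sketch of gradient-based head ("outskin") masking.
# Not MRIQC's implementation; sketch_headmask, sigma and percentile are
# hypothetical names/parameters chosen for this example.
import nibabel as nb
import numpy as np
from scipy import ndimage as ndi


def sketch_headmask(in_file, out_file, sigma=3.0, percentile=75.0):
    img = nb.load(in_file)
    data = img.get_fdata()

    # Gradient magnitude of a Gaussian-smoothed image: strong at the air/head interface
    grad = ndi.gaussian_gradient_magnitude(data, sigma=sigma)

    # Keep the strongest gradients, close small gaps, and fill the enclosed head
    shell = grad > np.percentile(grad, percentile)
    head = ndi.binary_fill_holes(ndi.binary_closing(shell, iterations=2))

    # Retain only the largest connected component to drop isolated noise blobs
    labels, nlabels = ndi.label(head)
    if nlabels > 1:
        sizes = ndi.sum(head, labels, index=range(1, nlabels + 1))
        head = labels == (int(np.argmax(sizes)) + 1)

    nb.Nifti1Image(head.astype("uint8"), img.affine).to_filename(out_file)
    return out_file

The workflow retained above additionally conditions the gradient on the estimated SNR and on the tissue segmentation (via image_gradient and gradient_threshold), which is why the estsnr node and the inputnode.in_segm field remain connected.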
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -21,6 +21,7 @@ license = "Apache-2.0"
requires-python = ">=3.8"
dependencies = [
'importlib_resources; python_version < "3.9"',
"dipy <1.6,>=1.4.0",
# jinja2 imports deprecated function removed in 2.1
"markupsafe ~= 2.0.1",
"matplotlib",
@@ -64,7 +65,6 @@ doc = [
]

full = [
"dipy<=1.4.0", # see nipreps/mriqc#969; can be removed if nipype>1.7.0
"nitime",
"nirodents >= 0.2.8",
"scikit-learn ~= 1.0",
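Because dipy moves from the optional "full" extras into the core dependencies (pinned to <1.6,>=1.4.0), the nipype Denoise interface instantiated by headmsk_wf is always importable. A minimal, hypothetical usage sketch; the file name and SNR value are placeholders, not MRIQC defaults:

# Standalone use of the interface wrapped by the "Denoise" node in headmsk_wf;
# the input path and SNR value below are placeholders.
from nipype.interfaces.dipy import Denoise

denoise = Denoise()
denoise.inputs.in_file = "sub-01_T1w.nii.gz"  # placeholder path
denoise.inputs.snr = 15.0  # in the workflow this value comes from the EstimateSNR node
result = denoise.run()  # writes the denoised image into the working directory
print(result.outputs.out_file)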