18 changes: 18 additions & 0 deletions conftest.py
@@ -6,6 +6,9 @@
import numpy as np
from phantoms.MR_XCAT_qMRI.sim_ivim_sig import phantom
import warnings
import os
import torch
import random
from tests.IVIMmodels.unit_tests.test_ivim_fit import PerformanceWarning
warnings.simplefilter("always", PerformanceWarning)

@@ -98,6 +101,21 @@ def pytest_addoption(parser):
help="Run MATLAB-dependent tests"
)

def set_global_seed(seed: int = 42):
    os.environ["PYTHONHASHSEED"] = str(seed)
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    print(f"✅ Global seed set to {seed}")

@pytest.fixture(autouse=True)
def global_seed():
    """Automatically seed all random generators before each test."""
    set_global_seed(42)

@pytest.fixture(scope="session")
def eng(request):
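The seeding block above covers every common source of randomness in the suite (Python's hash seed, random, NumPy, and torch on CPU and CUDA). A minimal, self-contained sketch of the property the fixture is meant to guarantee; the test name and the direct import from conftest are illustrative assumptions, not part of this PR:

import numpy as np
import torch
from conftest import set_global_seed  # assumes conftest.py is importable from the repository root

def test_reseeding_is_reproducible():
    # Draws separated by a re-seed should be identical.
    set_global_seed(42)
    first_np, first_t = np.random.rand(3), torch.rand(3)
    set_global_seed(42)
    second_np, second_t = np.random.rand(3), torch.rand(3)
    assert np.allclose(first_np, second_np)
    assert torch.equal(first_t, second_t)

Because the fixture is autouse with the default function scope, the re-seed runs before every test, keeping individual tests independent of execution order.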
3 changes: 2 additions & 1 deletion src/standardized/IVIM_NEToptim.py
@@ -88,7 +88,8 @@ def ivim_fit(self, signals, bvalues, **kwargs):
"""
if not np.array_equal(bvalues, self.bvalues):
raise ValueError("bvalue list at fitting must be identical as the one at initiation, otherwise it will not run")

if np.shape(np.shape(signals)) == (1,):
signals=signals[np.newaxis, :]
paramsNN = deep.predict_IVIM(signals, self.bvalues, self.net, self.arg)

results = {}
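The guard added above promotes a 1-D signal vector from a single voxel to a (1, n_bvalues) batch before it reaches deep.predict_IVIM. A standalone illustration of what the shape test does (the example array is hypothetical):

import numpy as np

signals = np.array([1.0, 0.9, 0.7, 0.5])    # one voxel, four b-values
if np.shape(np.shape(signals)) == (1,):     # true only when signals is 1-D
    signals = signals[np.newaxis, :]        # shape becomes (1, 4)
print(signals.shape)                        # (1, 4)

np.atleast_2d(signals) would express the same promotion in a single call.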
10 changes: 9 additions & 1 deletion src/standardized/Super_IVIM_DC.py
@@ -99,12 +99,20 @@ def ivim_fit(self, signals, bvalues, **kwargs):
"""
if not np.array_equal(bvalues, self.bvalues):
raise ValueError("bvalue list at fitting must be identical as the one at initiation, otherwise it will not run")

if np.shape(np.shape(signals)) == (1,):
signals=signals[np.newaxis, :]
Dp, Dt, f, S0_superivimdc = infer_from_signal(
signal=signals,
bvalues=self.bvalues,
model_path=f"{self.working_dir}/{self.super_ivim_dc_filename}.pt",
)
# fallback for empty arrays
if Dp.size == 0:
Dp = 0.0
if Dt.size == 0:
Dt = 0.0
if f.size == 0:
f = 0.0

results = {}
results["D"] = Dt
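Alongside the same 1-D promotion, this hunk falls back to 0.0 whenever infer_from_signal returns an empty array, so the results dict always holds usable numbers. A hypothetical helper (not from the PR) expressing the same idea more compactly:

import numpy as np

def or_default(value, default=0.0):
    # Substitute a default when inference yields an empty array; pass through otherwise.
    value = np.asarray(value)
    return default if value.size == 0 else value

Dp = or_default(np.array([]))         # -> 0.0
Dt = or_default(np.array([1.1e-3]))   # passed through unchanged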
2 changes: 1 addition & 1 deletion tests/IVIMmodels/unit_tests/algorithms.json
@@ -1,5 +1,6 @@
{
"algorithms": [
"Super_IVIM_DC",
"IVIM_NEToptim",
"TCML_TechnionIIT_lsqlm",
"TCML_TechnionIIT_lsqtrf",
@@ -8,7 +9,6 @@
"TCML_TechnionIIT_lsq_sls_trf",
"TCML_TechnionIIT_lsq_sls_BOBYQA",
"TCML_TechnionIIT_SLS",
"Super_IVIM_DC",
"ASD_MemorialSloanKettering_QAMPER_IVIM",
"ETP_SRI_LinearFitting",
"IAR_LU_biexp",
3,834 changes: 2,052 additions & 1,782 deletions tests/IVIMmodels/unit_tests/reference_output.csv

Large diffs are not rendered by default.

16 changes: 12 additions & 4 deletions tests/IVIMmodels/unit_tests/test_ivim_synthetic.py
@@ -7,6 +7,8 @@
from src.wrappers.OsipiBase import OsipiBase
from utilities.data_simulation.GenerateData import GenerateData

TRAINED_MODELS = {}

#run using pytest <path_to_this_file> --saveFileName test_output.txt --SNR 50 100 200
#e.g. pytest -m slow tests/IVIMmodels/unit_tests/test_ivim_synthetic.py --saveFileName test_output.csv --SNR 10 50 100 200 --fitCount 20
@pytest.mark.slow
@@ -15,9 +17,6 @@ def test_generated(algorithmlist, ivim_data, SNR, rtol, atol, fit_count, rician_
    ivim_algorithm, requires_matlab, deep_learning = algorithmlist
    if requires_matlab and eng is None:
        pytest.skip(reason="Running without matlab; if Matlab is available please run pytest --withmatlab")
    if deep_learning:
        pytest.skip(
            reason="Slow drifting in performance not yet implmented for deep learning algorithms") # requieres training a network per b-value set and inferencing all data in 1 go. So not 1 data point per time, but all data in 1 go :). Otherwise network will be trained many many times...
    rng = np.random.RandomState(42)
    # random.seed(42)
    S0 = 1
@@ -26,7 +25,16 @@
    D = data["D"]
    f = data["f"]
    Dp = data["Dp"]
    fit = OsipiBase(algorithm=ivim_algorithm)
    if deep_learning:
        if ivim_algorithm+str(SNR) not in TRAINED_MODELS:
            print(f"Training deep learning model {ivim_algorithm} ...")
            fit = OsipiBase(bvalues=bvals, algorithm=ivim_algorithm,SNR=SNR)
            TRAINED_MODELS[ivim_algorithm+str(SNR)] = fit
        else:
            print(f"Reusing trained model {ivim_algorithm}")
            fit = TRAINED_MODELS[ivim_algorithm+str(SNR)]
    else:
        fit = OsipiBase(algorithm=ivim_algorithm)
    # here is a prior
    if use_prior and hasattr(fit, "supported_priors") and fit.supported_priors:
        prior = [rng.normal(D, D/3, 10), rng.normal(f, f/3, 10), rng.normal(Dp, Dp/3, 10), rng.normal(1, 1/3, 10)]
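The change above replaces the blanket skip of deep learning algorithms with a module-level cache, so one network is trained per algorithm-and-SNR combination and reused for every fit in the parametrized run. A generic sketch of the pattern (the train callable and the names are placeholders, not the PR's OsipiBase API):

_MODEL_CACHE = {}

def get_or_train(algorithm, snr, train):
    key = (algorithm, snr)                         # tuple key instead of string concatenation
    if key not in _MODEL_CACHE:
        _MODEL_CACHE[key] = train(algorithm, snr)  # expensive step, done once per combination
    return _MODEL_CACHE[key]

Keying on a tuple also avoids the rare collision that ivim_algorithm+str(SNR) allows, e.g. "alg1" with SNR 10 and "alg11" with SNR 0 both map to "alg110".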
3 changes: 1 addition & 2 deletions utilities/data_simulation/GenerateData.py
@@ -192,15 +192,14 @@ def simulate_training_data(self, bvalues, SNR = (5,100), n = 1000000, Drange = (
        #data_sim = np.zeros([len(D), len(bvalues)])
        bvalues = np.array(bvalues)
        if type(SNR) == tuple:
            noise_std = 1/SNR[1] + test[:,3] * (1/SNR[0] - 1/SNR[1])
            noise_std = (1 / SNR[1] + test[:, 3] * (1 / SNR[0] - 1 / SNR[1]))[:, None]
            addnoise = True
        elif SNR == 0:
            addnoise = False
            noise_std = np.ones((n, 1))
        else:
            noise_std = np.full((n, 1), 1/SNR)
            addnoise = True
        noise_std = noise_std[:, np.newaxis]
        # loop over array to fill with simulated IVIM data
        bvalues = np.array(bvalues).reshape(1, -1)
        data_sim = 1 * (f * np.exp(-bvalues * Dp) + (1 - f) * np.exp(-bvalues * D))
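The reshaping fix above makes noise_std a column vector in every branch, so it broadcasts one noise level per simulated voxel against signals of shape (n, len(bvalues)) without the extra np.newaxis step afterwards. A short, self-contained demonstration of that broadcasting (shapes and values are illustrative):

import numpy as np

n, n_b = 4, 6
rng = np.random.default_rng(0)
signal = np.ones((n, n_b))                       # n simulated voxels, n_b b-values
noise_std = rng.uniform(0.01, 0.2, size=(n, 1))  # one noise level per voxel, shape (n, 1)
noisy = signal + noise_std * rng.standard_normal((n, n_b))
print(noisy.shape)                               # (4, 6); row i uses noise_std[i]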