Commit

fix: tests

nanoxas committed Oct 28, 2024
1 parent c745ab3 commit c4f8e54

Showing 8 changed files with 134 additions and 82 deletions.
4 changes: 3 additions & 1 deletion .gitignore
@@ -44,6 +44,8 @@ venv/
# Data & models
data/
models/
.neptune/*
outputs/*

# Exclude 'imnet_sample' from being ignored
!data/imnet_sample/
!data/imnet_sample/
16 changes: 8 additions & 8 deletions configs/biosr.yaml
@@ -1,13 +1,13 @@
task: "biosr_sr"

model:
noise_model_type: "unet"
noise_model_type: "sr3"
alpha: 0.001
load_weights: null
load_weights: '/home/gabriel/Documents/p_code/cvdm/outputs/biosr/weights/model_0_4e9fa08f-beb8-4335-907f-32bbe3f70a72.h5'
load_mu_weights: null
snr_expansion_n: 1
zmd: False
diff_inp: False
diff_inp: True

training:
lr: 0.0001
@@ -16,18 +16,18 @@ training:
eval:
output_path: "outputs/biosr"
generation_timesteps: 200
checkpoint_freq: 1000
checkpoint_freq: 10000
log_freq: 10
image_freq: 100
image_freq: 1000
val_freq: 200
val_len: 100

data:
dataset_path: "/bigdata/casus/MLID/maria/biosr_sample"
dataset_path: "/media/gabriel/data_hdd/biosr_dataset/train/biosr_ds.npz"
n_samples: 100
batch_size: 2
batch_size: 4
im_size: 256

neptune:
name: "CVDM"
project: "mlid/test"
project: "mlid/testing-cvdm"
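
A quick sketch of reading the settings touched above (assuming PyYAML; the project itself parses this file through its own config classes, e.g. the DataConfig imported in training_utils.py below, so this is only for illustration):

import yaml

# Load the biosr config and print the fields changed in this commit.
with open("configs/biosr.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["model"]["noise_model_type"])   # "sr3" after this change
print(cfg["model"]["diff_inp"])           # True after this change
print(cfg["eval"]["checkpoint_freq"])     # 10000
print(cfg["data"]["batch_size"])          # 4
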
91 changes: 47 additions & 44 deletions cvdm/utils/inference_utils.py
@@ -9,16 +9,16 @@
from tensorflow.keras.models import Model
from tqdm import tqdm

from cvdm.utils.metrics_utils import calculate_metrics



def ddpm_obtain_sr_img(
x: np.ndarray,
timesteps_test: int,
noise_model: Model,
schedule_model: Model,
mu_model: Optional[Model],
out_shape: Optional[Tuple[int, ...]] = None,
x: np.ndarray,
timesteps_test: int,
noise_model: Model,
schedule_model: Model,
mu_model: Optional[Model],
out_shape: Optional[Tuple[int, ...]] = None,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
if out_shape == None:
out_shape = x.shape
@@ -51,9 +51,9 @@ def ddpm_obtain_sr_img(
beta_factor = (1 - gamma_tm1) * beta_t / (1 - gamma_t)
if count > 0:
pred_sr = (
np.sqrt(gamma_t) * pred_sr
+ np.sqrt(1 - gamma_t - beta_factor) * pred_noise
+ np.sqrt(beta_factor) * z
np.sqrt(gamma_t) * pred_sr
+ np.sqrt(1 - gamma_t - beta_factor) * pred_noise
+ np.sqrt(beta_factor) * z
)
if mu_model is not None:
pred_noise = noise_model.predict([pred_sr, x, mu_pred, gamma_t], verbose=0)
@@ -70,10 +70,10 @@


def create_output_montage(
pred_y: np.ndarray,
gamma_vec: np.ndarray,
y: np.ndarray,
x: Optional[np.ndarray],
pred_y: np.ndarray,
gamma_vec: np.ndarray,
y: np.ndarray,
x: Optional[np.ndarray],
) -> np.ndarray:
if pred_y.shape[3] > 1:
channel_axis = 3
@@ -125,7 +125,7 @@ def log_loss(run: Optional[Run], avg_loss: np.ndarray, prefix: str) -> None:


def log_metrics(
run: Optional[Run], metrics_dict: Dict[str, float], prefix: str
run: Optional[Run], metrics_dict: Dict[str, float], prefix: str
) -> None:
if run is not None:
for metric_name, metric_value in metrics_dict.items():
@@ -137,25 +137,25 @@


def save_weights(
run: Optional[Run],
model: Model,
mu_model: Optional[Model],
step: int,
output_path: str,
run_id: str,
run: Optional[Run],
model: Model,
mu_model: Optional[Model],
step: int,
output_path: str,
run_id: str,
) -> None:
weights_dir = f"{output_path}/weights"
os.makedirs(weights_dir, exist_ok=True)

model_weights_path = f"{weights_dir}/model_{str(step)}_{run_id}.h5"
model.save_weights(model_weights_path, True)
model.save_weights(model_weights_path)

if run is not None:
run[f"model_weights/model_{str(step)}.h5"].upload(model_weights_path)

if mu_model is not None:
mu_model_weights_path = f"{weights_dir}/mu_model_{str(step)}_{run_id}.h5"
mu_model.save_weights(mu_model_weights_path, True)
mu_model.save_weights(mu_model_weights_path)

if run is not None:
run[f"mu_model_weights/mu_model_{str(step)}.h5"].upload(
@@ -164,13 +164,13 @@ def save_weights(


def save_output_montage(
run: Optional[Run],
output_montage: np.ndarray,
step: int,
output_path: str,
run_id: str,
prefix: str,
cmap: Optional[str] = None,
run: Optional[Run],
output_montage: np.ndarray,
step: int,
output_path: str,
run_id: str,
prefix: str,
cmap: Optional[str] = None,
) -> None:
output_dir = f"{output_path}/images"
os.makedirs(output_dir, exist_ok=True)
@@ -186,16 +186,15 @@


def obtain_output_montage_and_metrics(
batch_x: np.ndarray,
batch_y: np.ndarray,
noise_model: Model,
schedule_model: Model,
mu_model: Optional[Model],
generation_timesteps: int,
diff_inp: bool,
task: str,
) -> Tuple[np.ndarray, Dict]:

batch_x: np.ndarray,
batch_y: np.ndarray,
noise_model: Model,
schedule_model: Model,
mu_model: Optional[Model],
generation_timesteps: int,
diff_inp: bool,
task: str,
) -> np.ndarray:
pred_diff, gamma_vec, _ = ddpm_obtain_sr_img(
batch_x,
generation_timesteps,
@@ -205,11 +204,15 @@ def obtain_output_montage_and_metrics(
batch_y.shape,
)
if diff_inp:
pred_y = np.clip(pred_diff + batch_x, -1, 1)
pred_y = pred_diff + batch_x
else:
pred_y = pred_diff

if task == 'imagenet_sr':
pred_y = np.clip(pred_y, -1, 1)
else:
pred_y = np.clip(pred_diff, -1, 1)
pred_y = np.clip(pred_y, -1, 1)

metrics = calculate_metrics(pred_y, batch_y)
if task in ["biosr_sr", "imagenet_sr"]:
gamma_vec = np.clip(gamma_vec[..., generation_timesteps // 2], -1, 1)
montage_x = batch_x
@@ -224,4 +227,4 @@
)
if task in ["biosr_sr", "imagenet_sr"]:
output_montage = (output_montage * 127.5 + 127.5).astype(np.uint8)
return output_montage, metrics
return output_montage
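
For reference, the re-noising step inside ddpm_obtain_sr_img (the sqrt(gamma_t) / beta_factor expression in the hunk above) can be isolated as a self-contained numpy sketch. The schedule values below are invented placeholders; in the real loop gamma_t, gamma_tm1 and beta_t come from schedule_model and pred_noise from noise_model:

import numpy as np

rng = np.random.default_rng(0)
pred_sr = rng.normal(size=(1, 64, 64, 1))    # current super-resolution estimate
pred_noise = rng.normal(size=pred_sr.shape)  # noise predicted by the model
z = rng.normal(size=pred_sr.shape)           # fresh Gaussian noise

# Placeholder schedule values, for illustration only.
gamma_t, gamma_tm1, beta_t = 0.5, 0.6, 0.02
beta_factor = (1 - gamma_tm1) * beta_t / (1 - gamma_t)

# Same combination as in the loop above (applied only while count > 0):
# keep part of the current estimate, add back the predicted noise, and
# inject a small amount of fresh noise scaled by beta_factor.
pred_sr = (
    np.sqrt(gamma_t) * pred_sr
    + np.sqrt(1 - gamma_t - beta_factor) * pred_noise
    + np.sqrt(beta_factor) * z
)
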
55 changes: 55 additions & 0 deletions cvdm/utils/metrics_utils.py
@@ -0,0 +1,55 @@
from typing import Dict, Optional

import numpy as np
from skimage.metrics import peak_signal_noise_ratio, structural_similarity
#TODO: make the metrics calculations depend on the experiment. this file is a placeholder.

def nmae(y_pred: np.ndarray, y_real: np.ndarray) -> float:
nmae: float = np.sqrt(np.sum((y_pred - y_real) ** 2)) / np.sqrt(np.sum(y_real**2))
return nmae


def calculate_metrics(
y_pred_batch: np.ndarray,
y_real_batch: np.ndarray,
) -> Dict[str, float]:
y_pred_batch = np.array(y_pred_batch)
y_real_batch = np.array(y_real_batch)

if y_pred_batch.shape[3] > 1:
channel_axis = 2
else:
channel_axis = None

metrics = {
"mse": np.mean((y_pred_batch - y_real_batch) ** 2),
"mape": np.mean(np.abs((y_real_batch - y_pred_batch) / y_real_batch + 1e-10))
* 100,
"nmae": np.mean(
[
np.mean(np.abs(y_pred - y_real)) / np.mean(np.abs(y_real) + 1e-10)
for y_pred, y_real in zip(y_pred_batch, y_real_batch)
]
),
"psnr": np.mean(
[
peak_signal_noise_ratio(
np.squeeze(y_pred), np.squeeze(y_real), data_range=2
)
for y_pred, y_real in zip(y_pred_batch, y_real_batch)
]
),
"ssim": np.mean(
[
structural_similarity(
np.squeeze(y_pred),
np.squeeze(y_real),
data_range=2,
channel_axis=channel_axis,
)
for y_pred, y_real in zip(y_pred_batch, y_real_batch)
]
),
}

return metrics
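
A minimal usage sketch for the new helper, using random single-channel images in [-1, 1] to match the data_range=2 assumption of the PSNR/SSIM calls (shapes and values invented for illustration):

import numpy as np
from cvdm.utils.metrics_utils import calculate_metrics

rng = np.random.default_rng(0)
y_real = rng.uniform(-1.0, 1.0, size=(4, 64, 64, 1))                # "ground truth" batch
y_pred = np.clip(y_real + rng.normal(0.0, 0.1, size=y_real.shape),  # noisy "prediction"
                 -1.0, 1.0)

metrics = calculate_metrics(y_pred, y_real)
for name, value in metrics.items():
    print(f"{name}: {value:.4f}")   # mse, mape, nmae, psnr, ssim
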
20 changes: 9 additions & 11 deletions cvdm/utils/training_utils.py
@@ -7,23 +7,25 @@

from cvdm.configs.data_config import DataConfig
from cvdm.data.image_dir_dataloader import ImageDirDataloader
from cvdm.data.npy_dataloader import NpyDataloader
from cvdm.data.biosr_dataloader import BioSRDataloader
from cvdm.data.phase_2shot_dataloader import Phase2ShotDataloader
from cvdm.data.phase_polychrome_dataloader import PhasePolychromeDataloader


def prepare_dataset(
task: str, data_config: DataConfig, training: bool
task: str, data_config: DataConfig, training: bool
) -> Tuple[tf.data.Dataset, tf.TensorShape, tf.TensorShape]:
dataloader: Callable[
[], Iterator[Tuple[np.ndarray[Any, Any], np.ndarray[Any, Any]]]
]
if task == "biosr_sr":
dataloader = NpyDataloader(
dataloader = BioSRDataloader(
path=data_config.dataset_path,
n_samples=data_config.n_samples,
im_size=data_config.im_size,
)
x_channels = 1
y_channels = x_channels

elif task == "imagenet_sr":
dataloader = ImageDirDataloader(
@@ -53,12 +55,8 @@ def prepare_dataset(
x_channels = 2
y_channels = 1
elif task == "other":
dataloader = NpyDataloader(
path=data_config.dataset_path,
n_samples=data_config.n_samples,
im_size=data_config.im_size,
)
x_channels, y_channels = dataloader.get_channels()
print('Experiment not found')
raise NotImplementedError()
else:
raise NotImplementedError()

@@ -76,7 +74,7 @@


def prepare_model_input(
x: np.ndarray, y: np.ndarray, diff_inp: bool = False
x: np.ndarray, y: np.ndarray, diff_inp: bool = False
) -> List[np.ndarray]:
if diff_inp:
dfy = y - x
@@ -88,7 +86,7 @@


def train_on_batch_cvdm(
batch_x: np.ndarray, batch_y: np.ndarray, joint_model: Model, diff_inp: bool = False
batch_x: np.ndarray, batch_y: np.ndarray, joint_model: Model, diff_inp: bool = False
) -> np.ndarray:
model_input = prepare_model_input(batch_x, batch_y, diff_inp)
loss = joint_model.train_on_batch(model_input, np.zeros_like(batch_y))
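The diff_inp flag threaded through prepare_model_input and train_on_batch_cvdm pairs with the reconstruction change in obtain_output_montage_and_metrics (pred_y = pred_diff + batch_x). A toy numpy illustration of that convention, not the library API (shapes invented):

import numpy as np

rng = np.random.default_rng(0)
x = rng.uniform(-1.0, 1.0, size=(1, 256, 256, 1))  # conditioning input
y = rng.uniform(-1.0, 1.0, size=(1, 256, 256, 1))  # ground-truth target

# With diff_inp=True the model's target is the residual y - x
# (the `dfy = y - x` above); at evaluation time the sampled residual
# is added back onto the input to recover the image estimate.
dfy = y - x
reconstruction = dfy + x
assert np.allclose(reconstruction, y)
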
21 changes: 7 additions & 14 deletions eval.py
@@ -69,8 +69,12 @@ def main() -> None:
model_config=model_config,
)
if model_config.load_weights is not None:
print('weights loaded...')
joint_model.load_weights(model_config.load_weights)
else:
print('no weights loaded...')
if model_config.load_mu_weights is not None and mu_model is not None:
print('weights loaded...')
mu_model.load_weights(model_config.load_mu_weights)

run = None
@@ -88,21 +92,19 @@
cumulative_loss = np.zeros(5)
run_id = str(uuid.uuid4())
step = 0
cumulative_metrics: Dict[str, float] = defaultdict(float)
total_samples = 0

for batch in dataset:
batch_x, batch_y = batch

cmap = (
"gray" if task in ["biosr_phase", "imagenet_phase", "hcoco_phase"] else None
"gray" if task in ["biosr_sr", "biosr_phase", "imagenet_phase", "hcoco_phase"] else None
)
model_input = prepare_model_input(batch_x, batch_y, diff_inp=diff_inp)
cumulative_loss += joint_model.evaluate(
model_input, np.zeros_like(batch_y), verbose=0
)

output_montage, metrics = obtain_output_montage_and_metrics(
# TODO: add meaningful metrics for each experiment.
output_montage = obtain_output_montage_and_metrics(
batch_x,
batch_y.numpy(),
noise_model,
Expand All @@ -112,17 +114,8 @@ def main() -> None:
diff_inp,
task,
)
for metric_name, metric_value in metrics.items():
cumulative_metrics[metric_name] += metric_value * batch_size
total_samples += batch_size
step += 1

average_metrics = {
metric_name: total / total_samples
for metric_name, total in cumulative_metrics.items()
}

log_metrics(run, average_metrics, prefix="val")
save_output_montage(
run=run,
output_montage=output_montage,
