Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions linumpy/microscope/oct.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,36 @@ def load_image(self, crop: bool = True, galvo_shift: Union[bool, int] = False, c
vol = vol[:, 0:n_alines, 0:n_bscans]
return vol

def load_fringe(self, crop: bool = True):
    """Load the raw interferogram (fringe) volume from disk.

    Reads every ``fringe_*.bin`` file found under ``self.directory``
    (sorted by name), reshapes each to ``(n_z, nx + n_extra, n_frames)``
    in Fortran order, and concatenates them along the frame axis.

    Parameters
    ----------
    crop : bool
        If True (default), remove the galvo-return A-lines (``n_extra``)
        and keep only the first ``ny`` frames.

    Returns
    -------
    np.ndarray
        uint16 volume of shape ``(n_z, n_alines[, + n_extra], n_frames)``.

    Raises
    ------
    FileNotFoundError
        If no ``fringe_*.bin`` file is found in the acquisition directory.
    """
    # n_avg = self.info['n_repeat']  # TODO: use the number of averages when loading the data
    n_alines = self.info['nx']
    n_bscans = self.info['ny']
    n_extra = self.info['n_extra']  # galvo-return A-lines appended to each bscan
    n_alines_per_bscan = n_alines + n_extra
    # Number of spectral samples per A-line. Hard-coded here; presumably
    # fixed by the digitizer — TODO consider reading it from self.info.
    n_z = 2048

    files = sorted(self.directory.rglob("fringe_*.bin"))
    if not files:
        # The original code silently returned None (or crashed while
        # cropping); fail loudly with a clear message instead.
        raise FileNotFoundError(
            f"No fringe_*.bin file found in {self.directory}")

    # Collect each file's frames and concatenate once at the end: repeated
    # np.concatenate on a growing array is O(n^2) in total copied data.
    chunks = []
    for file in files:
        raw = np.fromfile(file, dtype=np.uint16)
        n_frames = raw.size // (n_alines_per_bscan * n_z)
        chunks.append(
            np.reshape(raw, (n_z, n_alines_per_bscan, n_frames), order='F'))
    vol = np.concatenate(chunks, axis=2)

    # Crop the galvo return and any extra frames
    if crop:
        vol = vol[:, 0:n_alines, 0:n_bscans]
    return vol

def detect_galvo_shift(self, vol: np.ndarray = None) -> int:
"""Detect the galvo shift necessary to place the galvo return at the end of the bscans stack"""
if vol is None:
Expand Down
108 changes: 108 additions & 0 deletions scripts/dev/dev_convert_data_for_bouma.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Prepare the data for the OCT k-space linearization and dispersion calibration."""

import argparse
from pathlib import Path

import matplotlib.pyplot as plt
import numpy as np
from scipy.io import savemat
from skimage.filters import threshold_li
from tqdm.auto import tqdm

from linumpy.microscope.oct import OCT

# Parameters
# directory = Path(r"D:\joel\2024-11-15-SOCT-UQAM-Calibration-Dispersion-Air-Mirror-woWindow-10x-higherContrast")

def _build_arg_parser():
p = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
p.add_argument("directory",
help="Full path to a directory containing multiple OCT acquisition of a reference mirror at multiple depths.")
p.add_argument("basename", default="mirror_z",
help="OCT base name, used to detect the files to process (default=%(default)s)")

return p
def main():
    """Stack mirror fringe acquisitions and export them for the k-space
    linearization / dispersion calibration (MATLAB) tools.

    For every ``<basename>*`` acquisition in ``directory``: load the first
    frame, drop the galvo-return A-lines, subtract the per-bscan background,
    save PNG previews of the detected spectrum and of a quick FFT
    reconstruction, and export the fringes to ``fringes.mat``.
    """
    # Parse arguments
    p = _build_arg_parser()
    args = p.parse_args()

    # Parameters
    directory = Path(args.directory)
    basename = args.basename
    spectrum_margin = 100  # samples kept on each side of the detected spectrum

    fringes_files = list(directory.glob(f"{basename}*"))
    print(f"There are {len(fringes_files)} fringes files to process")

    # Detect the file number (depth index encoded at the end of the file name)
    fringes_files.sort()
    fringes_ids = [int(f.stem.split("_")[-1]) for f in fringes_files]

    # Load the first fringe and determine the size of the data
    microscope = OCT(fringes_files[0])  # renamed from `oct`: shadowed the builtin
    galvo_return = microscope.info['n_extra']
    fringe = microscope.load_fringe()
    n_samples = fringe.shape[0]
    n_alines = fringe.shape[1]
    n_depths = len(fringes_files)
    data = np.zeros((n_samples, n_alines, n_depths), dtype=fringe.dtype)
    print(f"The dataset will have a shape of {data.shape}")

    # Load all the data. NOTE(review): assumes fringes_ids are exactly
    # 0..n_depths-1; a missing acquisition index would misplace or overflow
    # the depth axis — confirm against the acquisition naming scheme.
    for i, file in tqdm(zip(fringes_ids, fringes_files), total=n_depths):
        microscope = OCT(file)
        fringe = microscope.load_fringe()
        data[..., i] = fringe[..., 0]

    # Removing galvo return. Guarded: with galvo_return == 0 the slice
    # `0:-galvo_return` is `0:0` and would silently empty the array.
    if galvo_return > 0:
        data = data[:, 0:-galvo_return, ...]

    # Detect the min/max wavelength with signal, and propose cropping
    fringe_avg = data.mean(axis=(1, 2))
    mask = fringe_avg > threshold_li(fringe_avg)
    low_pt_otsu = max(np.where(mask)[0][0] - spectrum_margin, 0)
    high_pt_otsu = min(np.where(mask)[0][-1] + spectrum_margin, n_samples)

    # Save a preview of the average spectrum and the proposed cropping region
    figure_filename = directory / "spectrum.png"
    plt.title(f"Detected spectrum from {low_pt_otsu} to {high_pt_otsu} ({high_pt_otsu - low_pt_otsu} samples)")
    plt.plot(data.mean(axis=(1, 2)), label="Average fringe")
    plt.axvspan(0, low_pt_otsu, color='r', alpha=0.5, label="Cropping region")
    plt.axvspan(high_pt_otsu, n_samples, color='r', alpha=0.5)
    plt.xlabel("Wavelength (px)")
    plt.ylabel("Intensity")
    plt.legend()
    plt.savefig(figure_filename, dpi=300, pad_inches=0, bbox_inches='tight')
    plt.show()

    # Removing the background (mean over A-lines) for each bscan
    data_wo_bg = np.zeros_like(data, dtype=np.float64)
    for i in range(data.shape[2]):
        data_wo_bg[..., i] = data[..., i] - data[..., i].mean(axis=1, keepdims=True)

    # Perform a quick reconstruction: windowed FFT along the spectral axis,
    # keeping only the positive-frequency half of the depth profile.
    window = np.hanning(data_wo_bg.shape[0]).reshape((data.shape[0], 1, 1))
    bscans = np.abs(np.log(np.fft.fft(data_wo_bg * window, axis=0)))
    bscans = bscans[0: n_samples // 2, :]

    # Save the figure.
    figure_filename = directory / "mirror_woCalibration.png"
    plt.imshow(bscans.mean(axis=1), aspect="auto")
    plt.xlabel("Mirror depth")
    plt.ylabel("B-Scan Depth")
    plt.title("Tomogram")
    plt.savefig(figure_filename, dpi=300, bbox_inches='tight', pad_inches=0)
    plt.show()

    # Export the fringes as a matlab file
    output = {'fringes': {'ch1': data_wo_bg.astype(np.float32)}, 'omitBscansVec': []}
    output_file = directory / 'fringes.mat'
    savemat(output_file, output)


if __name__ == "__main__":
    main()
85 changes: 85 additions & 0 deletions scripts/dev/dev_export_calibration_bouma.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# Exports the calibration files required by our OCT to be used with PolyMtl/UQAM OCTs.
# Reads a MATLAB optimization result (k-space mapping + dispersion fit),
# resamples it to the cropped acquisition spectrum, and writes the binary
# calibration files (.dat) plus a JSON config describing them.
from datetime import datetime
from pathlib import Path

from matplotlib import pyplot as plt
from scipy.io import loadmat
import numpy as np
import json

# Input/output locations (hard-coded for this development script).
directory = Path("C://Users//Joël//OneDrive - UQAM//Documents//MATLAB//RobustMapping//Output//Results")
output_directory = Path("C://Users//Public//Documents")
result_file = directory / "UQAM-SOCT-20241115-Simplex-cropped.mat"

# Calibration identification and spectral cropping bounds (in samples).
name = "2024-11-29-air-10x"
kmin = 852
kmax = 1620
n_samples = kmax - kmin  # number of spectral samples kept after cropping
oversampling_factor = 8  # k-space oversampling used by the interpolation matrix
n_samples_kspace = oversampling_factor * n_samples
n_samples_output = 1024  # output A-line length expected by the acquisition software

# Load the calibration optimization results
calibration = loadmat(result_file)

# Process the k-space mapping.
# Column i of the interpolation matrix is obtained by linearly interpolating a
# unit impulse placed at oversampled index i onto the (non-uniform) k-space
# mapping — i.e. the columns hold the linear-interpolation (hat) basis weights.
# NOTE(review): assumes len(kspace_mapping) == n_samples // 2 so each column
# fits the matrix — verify against the contents of the .mat file.
kspace_mapping = calibration["map"].squeeze()
kspace_mapping = kspace_mapping / max(kspace_mapping) * n_samples_kspace # Rescale to the oversampled index range
kspace_interp_matrix = np.zeros((n_samples//2, n_samples_kspace))
for i in range(n_samples_kspace):
    x = np.arange(n_samples_kspace)
    y = np.zeros((n_samples_kspace,))
    y[i] = 1
    x_new = kspace_mapping

    kspace_interp_matrix[:, i] = np.interp(x_new, x, y)

# Process the dispersion: resample the fitted dispersion curve onto
# n_samples//2 points (the reconstructed A-line depth).
dispersion = calibration["dispFit"].squeeze()
x = np.linspace(-1, 1, len(dispersion))
x_new = np.linspace(-1, 1, n_samples//2)
dispersion = np.interp(x_new, x, dispersion)

# Prepare the spectral apodization window
window = np.hanning(n_samples//2)

# Save a summary figure of the three calibration components
figure_filename = output_directory / f"calibration_graphs_{name}.png"
fig, axes = plt.subplots(nrows=1, ncols=3, figsize=[12,4])
axes[0].plot(kspace_mapping)
axes[0].set_title("K-Space Mapping")
axes[1].plot(dispersion)
axes[1].set_title("Dispersion")
axes[2].plot(window)
axes[2].set_title("Window")
plt.savefig(figure_filename, dpi=300, pad_inches=0, bbox_inches='tight')


# Write the calibration files (raw float64 binary, no header)
window_filename = output_directory / f"filter_{name}.dat"
with open(window_filename, "wb") as f:
    window.astype(np.float64).tofile(f)

dispersion_filename = output_directory / f"phase_{name}.dat"
with open(dispersion_filename, "wb") as f:
    dispersion.astype(np.float64).tofile(f)

kspace_interp_matrix_filename = output_directory / f"kspace_interp_matrix_{name}.dat"
with open(kspace_interp_matrix_filename, "wb") as f:
    kspace_interp_matrix.astype(np.float64).tofile(f)

# Write a JSON config describing the calibration and pointing at the files above
config = {}
config["name"] = name
config["kmin"] = kmin
config["kmax"] = kmax
config["oversampling_factor"] = oversampling_factor
config["n_samples_kspace"] = n_samples_kspace
config["n_samples_output"] = n_samples_output
config["window_filename"] = str(window_filename)
config["dispersion_filename"] = str(dispersion_filename)
config["kspace_interp_matrix_filename"] = str(kspace_interp_matrix_filename)
config["calib_graph_filename"] = str(figure_filename)
config["date"] = datetime.now().strftime("%Y-%m-%d@%Hh%M:%Ss")  # e.g. 2024-11-29@14h05:33s
config_filename = output_directory / f"config_{name}.json"
with open(config_filename, "w") as f:
    json.dump(config, f)
Loading