
Downsampling a Large 3D Image in NumPy

I need to downsample large 3D images (30 GB+) that are composed of a series of 2D TIFF slices by arbitrary non-integer factors. scipy.ndimage.zoom works well for input images that fit into RAM, but not for volumes this large.
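For a volume that does fit in memory, the straightforward approach would look something like the sketch below (the array shape and zoom factor here are placeholders):

import numpy as np
from scipy.ndimage import zoom

# Fine while the whole volume fits in RAM; arbitrary non-integer factors are allowed
small_volume = np.random.rand(128, 128, 128).astype(np.float32)
downsampled = zoom(small_volume, 0.43, order=1)  # order=1 gives trilinear interpolation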

Solution 1:

So I worked out what to do by looking at the ImageJ source code. I'm posting it here in case it helps anyone else:

import SimpleITK as sitk
import cv2
import numpy as np

def downsample_large_volume(img_path_list, input_voxel_size, output_voxel_size):

    scale = input_voxel_size / output_voxel_size  # < 1 when downsampling (output voxels are larger)
    resampled_zs = []

    # Resample each z slice in the xy plane
    for img_path in img_path_list:
        z_slice_arr = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)  # CV_LOAD_IMAGE_GRAYSCALE was removed in OpenCV 3
        z_slice_resized = cv2.resize(z_slice_arr, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_AREA)
        resampled_zs.append(z_slice_resized)  # Or save to disk to save RAM and use np.memmap for xz scaling


    temp_arr = np.dstack(resampled_zs)  # dstack stacks along a new third axis, so the array is now (y, x, z)
    final_scaled_slices = []

    # Resample xz plane at each y
    for y in range(temp_arr.shape[0]):
        xz_plane = temp_arr[y, :, :]
        scaled_xz = cv2.resize(xz_plane, (0, 0), fx=scale, fy=1, interpolation=cv2.INTER_AREA)
        final_scaled_slices.append(scaled_xz)

    final_array = np.dstack(final_scaled_slices)  # Stacked over y, so the array is now (x, z, y)

    # Swap axes to (z, y, x), the order sitk.GetImageFromArray expects
    img = sitk.GetImageFromArray(np.swapaxes(np.swapaxes(final_array, 0, 1), 1, 2))
    sitk.WriteImage(img, 'scaled_by_pixel.nrrd')
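A minimal usage sketch, assuming the slices are stored as sequentially named TIFF files and both voxel sizes are given in the same unit (the glob pattern and values below are placeholders):

import glob

slice_paths = sorted(glob.glob('/data/volume/*.tif'))
downsample_large_volume(slice_paths, input_voxel_size=1.0, output_voxel_size=2.5)
# The result is written to 'scaled_by_pixel.nrrd' in the working directory

Because each full-resolution slice is loaded, shrunk and discarded one at a time, only the already-downsampled stack ever has to sit in RAM, and the INTER_AREA interpolation averages pixels rather than dropping them, which avoids the aliasing that plain decimation would introduce.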
