import os

import cv2
import numpy as np
from google.colab import drive
# Mount Google Drive so the relative 'gdrive/MyDrive/...' paths below resolve.
# Colab-only; prompts for authorization on first run.
drive.mount('/content/gdrive')
def _normalized_depth_u8(depth_src):
    """Min-max stretch a grayscale depth image to the full 0-255 range.

    Returns a uint8 array, which is what ``cv2.imwrite`` expects for an
    8-bit JPEG (the original's ``astype(int)`` produced int64).
    A constant image (max == min) maps to all zeros instead of dividing
    by zero.
    """
    depth_min = int(depth_src.min())
    depth_max = int(depth_src.max())
    if depth_max == depth_min:
        return np.zeros_like(depth_src, dtype=np.uint8)
    scaled = (depth_src.astype(np.float64) - depth_min) / (depth_max - depth_min)
    return np.round(scaled * 255).astype(np.uint8)


def main():
    """Contrast-stretch every 'Depth_<name>.jpg' found in the depth folder.

    Each normalized result is written back to the same folder as
    'depth_<name>.jpg'. The lowercase output prefix does not match the
    uppercase input prefix, so outputs are never re-processed on a
    later run.
    """
    depth_dir = 'gdrive/MyDrive/depth/'
    depth_prefix = 'Depth_'
    for f in os.listdir(depth_dir):
        # Bug fix: original iterated an undefined `in_dir`. Assumption
        # (TODO confirm): the inputs live in depth_dir and are identified
        # by the 'Depth_' prefix, so filter on it and strip it — otherwise
        # the read path below would become 'Depth_Depth_<name>.jpg'.
        if not f.startswith(depth_prefix):
            continue
        filename = f.split(".")[0][len(depth_prefix):]
        print("=== Start processing:", filename, "===")
        depth_src = cv2.imread(
            os.path.join(depth_dir, depth_prefix + filename + ".jpg"),
            cv2.IMREAD_GRAYSCALE,
        )
        if depth_src is None:
            # cv2.imread signals failure by returning None, not by raising;
            # without this check .min() below would crash with AttributeError.
            print("!!! Could not read", f, "- skipping")
            continue
        depth = _normalized_depth_u8(depth_src)
        # Bug fix: original wrote the undefined name `right_fix` here
        # instead of the normalized image it just computed.
        cv2.imwrite(os.path.join(depth_dir, "depth_" + filename + ".jpg"), depth)


if __name__ == "__main__":
    main()
Hi, arundhati87
Multithreading doesn’t really help speed up CPU-bound jobs, because of Python’s global interpreter lock.
Multiprocessing will: for example, if you have a quad-core CPU, you can run four worker processes in parallel,
from concurrent.futures import ProcessPoolExecutor
def main():
with ProcessPoolExecutor(max_workers=4) as pool:
retvals = pool.map(your_job, iterables)
print(list(retvals))
where `iterables` is your list of input filenames.
You can also compare how multithreading performs by substituting the import with
from concurrent.futures import ThreadPoolExecutor
(Sorry, I can’t answer the GPU question.)