# NOTE(review): dataset-viewer table chrome removed here (column "text",
# string lengths 1 - 93.6k) -- it was extraction residue, not source code.
def tensor2im(input_image, imtype=np.uint8):
    """Convert a Tensor with values in [-1, 1] into a numpy image array in [0, 255].

    Parameters:
        input_image (tensor or ndarray) -- the input image tensor array; a 4-D
            tensor is treated as a batch and only the first item is converted
        imtype (type) -- the desired dtype of the converted numpy array

    Returns:
        An (H, W, C) numpy array cast to ``imtype``; single-channel inputs are
        tiled to 3 channels. An ndarray input is only cast; any other input
        type is returned unchanged.
    """
    if isinstance(input_image, np.ndarray):
        # Already a numpy array: only the dtype cast remains to do.
        return input_image.astype(imtype)
    if not isinstance(input_image, torch.Tensor):
        # Not convertible -- hand it back untouched (original contract).
        return input_image
    image_tensor = input_image.data  # get the data from a variable
    if image_tensor.dim() == 4:
        # Batch of images: keep only the first one.
        image_tensor = image_tensor[0]
    # Map [-1, 1] -> [0, 1], clamp, and convert it into a numpy array.
    image_numpy = ((image_tensor + 1.0) / 2.0).clamp(0, 1).cpu().float().numpy()
    if image_numpy.shape[0] == 1:  # grayscale to RGB
        image_numpy = np.tile(image_numpy, (3, 1, 1))
    # post-processing: transpose CHW -> HWC and scale to [0, 255]
    image_numpy = np.transpose(image_numpy, (1, 2, 0)) * 255
    return image_numpy.astype(imtype)
|
def tensor2array(value_tensor):
    """Flatten a Tensor into a 1-D float numpy array.

    :param value_tensor: input tensor; a 3-D tensor is flattened as-is,
        otherwise only the first item along dim 0 is flattened
    :return: 1-D numpy array of float32 values
    """
    source = value_tensor if value_tensor.dim() == 3 else value_tensor[0]
    return source.view(-1).cpu().float().numpy()
|
def save_image(image_numpy, image_path):
    """Save a numpy image to the disk.

    Parameters:
        image_numpy (numpy array) -- input numpy array, shape (H, W, C)
        image_path (str)          -- the path of the image
    """
    height, width, channels = image_numpy.shape
    if channels == 1:
        # Single-channel images are written as plain 2-D (H, W) arrays.
        image_numpy = image_numpy.reshape(height, width)
    imageio.imwrite(image_path, image_numpy)
|
def mkdirs(paths):
    """Create empty directories if they don't exist.

    Parameters:
        paths (str list) -- a list of directory paths; a single str is also
            accepted and treated as one path
    """
    # Anything that is not a list is treated as a single path.
    if not (isinstance(paths, list) and not isinstance(paths, str)):
        mkdir(paths)
        return
    for entry in paths:
        mkdir(entry)
|
def mkdir(path):
    """Create a single empty directory if it didn't exist.

    Parameters:
        path (str) -- a single directory path
    """
    if os.path.exists(path):
        return  # nothing to do -- directory (or file) already present
    os.makedirs(path)
|
# <FILESEP>
# Common libs
import numpy as np
import multiprocessing as mp
import os, sys, time, glob, pickle, psutil, argparse, importlib
sys.path.insert(0, f'{os.getcwd()}')
# Custom libs
from config import load_config, log_config
from utils.logger import print_mem, redirect_io
from config.utils import get_snap
|
def get_last_train(cfg):
    """Return the most recent training run directory for *cfg*, or None.

    Runs are looked up under ``results/<dataset>/<name>/`` and the
    lexicographically last entry is taken as the latest one.
    """
    runs = glob.glob(f'results/{cfg.dataset.lower()}/{cfg.name}/*')
    runs.sort()
    if not runs:
        return None
    return runs[-1]
|
# Command-line interface for the training/evaluation entry script.
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--cfg_path', type=str, help='config path')
parser.add_argument('--gpus', type=str, default=None, help='the number/ID of GPU(s) to use [default: 1], 0 to use cpu only')
parser.add_argument('--mode', type=str, default=None, help='options: train, val, test')
# Stored as `rand_seed` on the parsed namespace via dest=.
parser.add_argument('--seed', type=int, default=None, dest='rand_seed', help='random seed for use')
parser.add_argument('--data_path', type=str, default=None, help='path to dataset dir = data_path/dataset_name')
parser.add_argument('--model_path', type=str, default=None, help='pretrained model path')
parser.add_argument('--saving_path', type=str, default=None, help='specified saving path')
parser.add_argument('--num_votes', type=float, default=None, help='least num of votes of each point (default to 30)')
# Value 'a' means "use all cpu cores"; an empty value maps to None; anything else is parsed as int.
parser.add_argument('--num_threads', type=lambda n: mp.cpu_count() if n == 'a' else int(n) if n else None, default=None, help='the number of cpu to use for data loading')
parser.add_argument('--set', type=str, help='external source to set the config - str of dict / yaml file')
|
# NOTE(review): dataset-viewer footer chrome removed here ("Subsets and
# Splits" / community-query banner) -- extraction residue, not source code.
# The original script appears truncated at this point.