import argparse
import glob
import multiprocessing as mp
import multiprocessing.pool as mpp
import os
import random
import sys
import time

import cv2
import numpy as np
import torch

SEED = 66


def seed_everything(seed):
    """Seed Python, NumPy and PyTorch RNGs for reproducible runs."""
    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True
    # cuDNN autotuning (benchmark=True) can pick non-deterministic kernels,
    # which defeats the deterministic flag above, so keep it disabled.
    torch.backends.cudnn.benchmark = False


def label2rgb(mask, mask_pred):
    """Colour-code a binary prediction against its ground-truth mask.

    White = true positive, black = true negative,
    green = false negative, red = false positive.
    """
    real_1 = (mask == 1)
    real_0 = (mask == 0)
    pred_1 = (mask_pred == 1)
    pred_0 = (mask_pred == 0)

    TP = np.logical_and(real_1, pred_1)
    TN = np.logical_and(real_0, pred_0)
    FN = np.logical_and(real_1, pred_0)
    FP = np.logical_and(real_0, pred_1)

    h, w = mask.shape[0], mask.shape[1]
    mask_rgb = np.zeros(shape=(h, w, 3), dtype=np.uint8)
    # Each confusion-matrix mask is a 2-D boolean array, so it can index
    # mask_rgb directly.
    mask_rgb[TP] = [255, 255, 255]
    mask_rgb[TN] = [0, 0, 0]
    mask_rgb[FN] = [0, 255, 0]
    mask_rgb[FP] = [255, 0, 0]

    return mask_rgb


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--dataset", default="Vaihingen")
    parser.add_argument("--mask-dir", default="data/Test/masks")
    parser.add_argument("--output-mask-dir", default="data/Test/masks_rgb")
    return parser.parse_args()


def mask_save(inp):
    """Render one (ground truth, prediction) pair as an RGB comparison mask on disk."""
    (mask, mask_pred, masks_output_dir, file_name) = inp
    out_mask_path = os.path.join(masks_output_dir, "{}.png".format(file_name))

    label = label2rgb(mask.copy(), mask_pred.copy())

    # label2rgb builds colours in RGB order while cv2.imwrite expects BGR,
    # so swap the first and last channels before saving.
    rgb_label = cv2.cvtColor(label, cv2.COLOR_BGR2RGB)
    cv2.imwrite(out_mask_path, rgb_label)
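

# --- Illustrative driver sketch (not part of the original section) -----------
# A minimal example of how the helpers above could be wired together: parse
# the CLI arguments, seed the RNGs, pair each ground-truth mask with a
# prediction, and write the colour-coded comparisons with a multiprocessing
# pool. The prediction directory (`pred_dir`) and the {0, 1} label encoding
# of the PNG masks are assumptions, not taken from the original script.
if __name__ == "__main__":
    args = parse_args()
    seed_everything(SEED)
    os.makedirs(args.output_mask_dir, exist_ok=True)

    # Hypothetical directory holding the predicted masks, next to the GT masks.
    pred_dir = os.path.join(os.path.dirname(args.mask_dir), "masks_pred")

    inputs = []
    for mask_path in sorted(glob.glob(os.path.join(args.mask_dir, "*.png"))):
        file_name = os.path.splitext(os.path.basename(mask_path))[0]
        pred_path = os.path.join(pred_dir, "{}.png".format(file_name))
        if not os.path.exists(pred_path):
            continue
        # Assumes masks are stored as single-channel PNGs with labels {0, 1}.
        mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)
        mask_pred = cv2.imread(pred_path, cv2.IMREAD_GRAYSCALE)
        inputs.append((mask, mask_pred, args.output_mask_dir, file_name))

    t0 = time.time()
    with mp.Pool(processes=mp.cpu_count()) as pool:
        pool.map(mask_save, inputs)
    print("Saved {} comparison masks in {:.1f}s".format(len(inputs), time.time() - t0))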