# NOTE(review): dataset-card residue ("text / stringlengths / 1 / 93.6k") left over
# from an automated extraction — not valid Python; kept as a comment so the file parses.
# NOTE(review): this span is the tail of a training entry point whose `def` line is
# above this excerpt (the `global network` statement below confirms function scope);
# leading indentation was stripped by extraction — restore it when merging.
# Report the test/train split sizes before training starts.
print ('test,train: ',len(dataset.dataset_files['test']), \
len(dataset.dataset_files['train']))
# Evaluate the test set in batches of 8 images.
dataset.test_batch_size = 8
# Expose the model at module scope so sibling functions can reach it.
global network
# Build the LSCCNN head 'scale_4' predicting at 1/4 input resolution;
# nofreeze=True presumably leaves the backbone trainable — confirm in LSCCNN.
network = LSCCNN(args, nofreeze=True, name='scale_4', output_downscale=4)
# Presumably initialises the backbone from pretrained VGG-16 weights — verify helper.
load_model_VGG16(network)
# Create the run directory (plus a 'snapshots' subdir for checkpoints) if absent.
model_save_path = os.path.join(model_save_dir, 'train2')
if not os.path.exists(model_save_path):
os.makedirs(model_save_path)
os.makedirs(os.path.join(model_save_path, 'snapshots'))
train_networks(network=network,
dataset=dataset,
network_functions=networkFunctions(),
log_path=model_save_path)
print('\n-------\nDONE.')
if __name__ == '__main__':
    args = parser.parse_args()

    # -- Assign GPU
    os.environ["CUDA_VISIBLE_DEVICES"] = str(args.gpu)

    # -- Validate CLI arguments.
    # NOTE(review): the original used `assert`, which is stripped under `python -O`;
    # raise explicitly so validation always runs.
    if not args.dataset:
        raise ValueError('--dataset must be specified')
    if len(args.trained_model) not in (0, 1):
        raise ValueError('at most one trained model may be supplied')

    # -- Seed every RNG source and pin cuDNN to deterministic behaviour
    #    so runs are reproducible.
    np.random.seed(11)
    random.seed(11)
    torch.manual_seed(11)
    torch.backends.cudnn.enabled = False
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    torch.cuda.manual_seed(11)
    torch.cuda.manual_seed_all(11)

    # -- Per-dataset image/ground-truth paths, validation split size, and the
    #    downscale factor of the predicted density maps.
    if args.dataset == "parta":
        dataset_paths = {'test': ['../dataset/ST_partA/test_data/images',
                                  '../dataset/ST_partA/test_data/ground_truth'],
                         'train': ['../dataset/ST_partA/train_data/images',
                                   '../dataset/ST_partA/train_data/ground_truth']}
        validation_set = 30
        path = '../dataset/stparta_dotmaps_predscale0.5_rgb_ddcnn++_test_val_30'
        output_downscale = 2
    elif args.dataset == "partb":
        dataset_paths = {'test': ['../dataset/ST_partB/test_data/images',
                                  '../dataset/ST_partB/test_data/ground_truth'],
                         'train': ['../dataset/ST_partB/train_data/images',
                                   '../dataset/ST_partB/train_data/ground_truth']}
        validation_set = 80
        output_downscale = 2
        path = "../dataset/stpartb_dotmaps_predscale0.5_rgb_ddcnn++_test/"
    elif args.dataset == "ucfqnrf":
        dataset_paths = {'test': ['../dataset/UCF-QNRF_ECCV18/Test/images',
                                  '../dataset/UCF-QNRF_ECCV18/Test/ground_truth'],
                         'train': ['../dataset/UCF-QNRF_ECCV18/Train/images',
                                   '../dataset/UCF-QNRF_ECCV18/Train/ground_truth']}
        validation_set = 240
        output_downscale = 2
        path = '../dataset/qnrf_dotmaps_predictionScale_' + str(output_downscale)
    else:
        # BUG FIX: the original silently fell through here, leaving `path` and
        # friends undefined and crashing later with NameError; fail fast instead.
        raise ValueError('unknown dataset: {!r} '
                         '(expected "parta", "partb" or "ucfqnrf")'.format(args.dataset))

    model_save_dir = './models'
    batch_size = args.batch_size  # read as module globals by the training code
    crop_size = 224
    dataset = DataReader(path)

    # -- Train the model
    train()
# <FILESEP>
from os import environ
from os.path import abspath
from typing import Any
import fabric
# NOTE(review): the indentation of this fabric task was stripped by extraction and the
# trailing ctx.run(...) call is cut off at the end of this excerpt — restore both when merging.
# Purpose: wipe `dest` and regenerate Sphinx apidoc stubs for the package rooted at `src`.
def regen_apidoc(ctx: Any, src: str, dest: str, is_nspkg: bool = False) -> None:
# Remove stale generated stubs so deleted modules do not linger in the docs.
ctx.run(f"rm -rf {dest}", replace_env=False, pty=True)
# Namespace packages need --implicit-namespaces for sphinx-apidoc to descend into them.
nsopt = " --implicit-namespaces" if is_nspkg else ""
# On Read the Docs sphinx-apidoc is on PATH; locally, use the project virtualenv copy.
if "READTHEDOCS" in environ:
cmd = "sphinx-apidoc"
else:
cmd = "./env/bin/sphinx-apidoc"
# -e: one page per module; -M: module docs before submodules; -f: overwrite existing stubs.
ctx.run(
f"{cmd} -o {dest} -f{nsopt} -e -M {src}",
replace_env=False,