        if len(evaluator_list) == 0:
            raise NotImplementedError(
                "no Evaluator for the dataset {} with the type {}".format(
                    dataset_name, evaluator_type
                )
            )
        elif len(evaluator_list) == 1:
            return evaluator_list[0]
        return DatasetEvaluators(evaluator_list)
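    # Hedged usage sketch (illustrative only; the class name "Trainer" and
    # the dataset name "my_dataset_val" are assumptions, not from this file):
    #
    #   evaluator = Trainer.build_evaluator(cfg, "my_dataset_val")
    #   # 0 matching evaluators -> NotImplementedError
    #   # 1 evaluator           -> returned directly
    #   # 2+ evaluators         -> wrapped in DatasetEvaluators, which fans
    #   #                          every process()/evaluate() call out to
    #   #                          each child evaluator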
    @classmethod
    def build_train_loader(cls, cfg):
        dataset = None
        mapper = None
        # Semantic segmentation dataset mappers. Branches that do not return
        # directly fall through to the final call, which also passes the
        # custom (class-sampled) dataset to the loader.
        if cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_semantic":
            mapper = MaskFormerSemanticDatasetMapper(cfg, True)
        elif cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_binary_semantic":
            mapper = MaskFormerBinarySemanticDatasetMapper(cfg, True)
            dataset = dataset_sample_per_class(cfg)
        elif cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_full_binary_semantic":
            mapper = MaskFormerBinaryFullDatasetMapper(cfg, True)
            dataset = dataset_sample_per_task_class(cfg)
        # Panoptic segmentation dataset mapper
        elif cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_panoptic":
            mapper = MaskFormerPanopticDatasetMapper(cfg, True)
            return build_detection_train_loader(cfg, mapper=mapper)
        # Instance segmentation dataset mapper
        elif cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_instance":
            mapper = MaskFormerInstanceDatasetMapper(cfg, True)
            return build_detection_train_loader(cfg, mapper=mapper)
        elif cfg.INPUT.DATASET_MAPPER_NAME == "coco_full_lsj":
            mapper = COCOFullTaskNewBaselineDatasetMapper(cfg, True)
            return build_detection_train_loader(cfg, mapper=mapper)
        return build_detection_train_loader(cfg, mapper=mapper, dataset=dataset)
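    # Hedged config sketch: cfg.INPUT.DATASET_MAPPER_NAME selects one branch
    # above. The YAML below is illustrative, not a config shipped with this
    # repo:
    #
    #   INPUT:
    #     DATASET_MAPPER_NAME: "mask_former_binary_semantic"
    #
    # With this value, the loader also receives the per-class-sampled dataset
    # returned by dataset_sample_per_class(cfg) via the final fall-through
    # return.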
    @classmethod
    def build_test_loader(cls, cfg, dataset_name):
        """
        Returns:
            iterable

        It now calls :func:`detectron2.data.build_detection_test_loader`.
        Overwrite it if you'd like a different data loader.
        """
        # Initialize mapper up front so it is defined even when cfg.ORACLE
        # is False (otherwise the final return would hit an unbound name).
        mapper = None
        if cfg.ORACLE:
            if cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_semantic":
                mapper = MaskFormerSemanticDatasetMapper(cfg, False)
            elif cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_binary_semantic":
                mapper = MaskFormerBinarySemanticDatasetMapper(cfg, False)
            elif cfg.INPUT.DATASET_MAPPER_NAME == "mask_former_full_binary_semantic":
                mapper = MaskFormerBinarySemanticDatasetMapper(cfg, False)
            # Kept verbatim: this (misspelled) value must match the string
            # used in the config files.
            elif cfg.INPUT.DATASET_MAPPER_NAME == "propsoal_classification":
                mapper = ProposalClasificationDatasetMapper(cfg, False)
        return build_detection_test_loader(cfg, dataset_name, mapper=mapper)
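    # Hedged usage sketch for evaluation (the class name "Trainer" and the
    # dataset name are assumptions; inference_on_dataset comes from
    # detectron2.evaluation):
    #
    #   loader = Trainer.build_test_loader(cfg, "my_dataset_val")
    #   evaluator = Trainer.build_evaluator(cfg, "my_dataset_val")
    #   results = inference_on_dataset(model, loader, evaluator)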
    def build_writers(self):
        """
        Build a list of writers to be used. By default it contains
        writers that write metrics to the screen, a json file, and
        a Weights & Biases run, respectively.
        If you'd like a different list of writers, you can overwrite it in
        your trainer.

        Returns:
            list[EventWriter]: a list of :class:`EventWriter` objects.

        It is now implemented by:
        ::
            return [
                CommonMetricPrinter(self.max_iter),
                JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, "metrics.json")),
                WandbWriter(),
            ]
        """
        # Here the default print/log frequency of each writer is used.
        return [
            # It may not always print what you want to see, since it
            # prints "common" metrics only.
            CommonMetricPrinter(self.max_iter),
            JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, "metrics.json")),
            WandbWriter(),
        ]
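    # Note (assumption based on detectron2's DefaultTrainer): these writers
    # are wrapped in a PeriodicWriter hook, which flushes them every 20
    # iterations by default, e.g.:
    #
    #   from detectron2.engine import hooks
    #   hooks.PeriodicWriter(self.build_writers(), period=20)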
    @classmethod
    def build_lr_scheduler(cls, cfg, optimizer):
        """
        It now calls :func:`detectron2.solver.build_lr_scheduler`.
        Overwrite it if you'd like a different scheduler.
        """
        return build_lr_scheduler(cfg, optimizer)
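    # Hedged config sketch, assuming the core detectron2 build_lr_scheduler
    # the docstring names (keys and values illustrative):
    #
    #   SOLVER:
    #     LR_SCHEDULER_NAME: "WarmupMultiStepLR"
    #     STEPS: (60000, 80000)
    #     GAMMA: 0.1
    #     MAX_ITER: 90000
    #     WARMUP_ITERS: 1000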
    @classmethod
    def build_optimizer(cls, cfg, model):
        weight_decay_norm = cfg.SOLVER.WEIGHT_DECAY_NORM
        weight_decay_embed = cfg.SOLVER.WEIGHT_DECAY_EMBED

        defaults = {}