import math
from collections import OrderedDict
from typing import Any, Dict

import timm
import torch
import torchvision.transforms as T
from timm.data.transforms import str_to_interp_mode

# Map user-facing interpolation names (as used in PIL/timm configs) to
# torchvision InterpolationMode values.
INTERPOLATION_MODE_MAP = {
    "nearest": T.InterpolationMode.NEAREST,
    "bilinear": T.InterpolationMode.BILINEAR,
    "bicubic": T.InterpolationMode.BICUBIC,
    "cubic": T.InterpolationMode.BICUBIC,
    "box": T.InterpolationMode.BOX,
    "hamming": T.InterpolationMode.HAMMING,
    "lanczos": T.InterpolationMode.LANCZOS,
}

class AutoAugment(T.AutoAugment):
    """Extend PyTorch's AutoAugment to init from a policy and an interpolation name."""

    def __init__(
        self, policy: str = "imagenet", interpolation: str = "bilinear", *args, **kwargs
    ) -> None:
        """Init from a policy name and an interpolation name."""
        if "cifar" in policy.lower():
            policy = T.AutoAugmentPolicy.CIFAR10
        elif "svhn" in policy.lower():
            policy = T.AutoAugmentPolicy.SVHN
        else:
            policy = T.AutoAugmentPolicy.IMAGENET
        interpolation = INTERPOLATION_MODE_MAP[interpolation]
        super().__init__(*args, policy=policy, interpolation=interpolation, **kwargs)

class RandAugment(T.RandAugment):
    """Extend PyTorch's RandAugment to init from an interpolation name."""

    def __init__(self, interpolation: str = "bilinear", *args, **kwargs) -> None:
        """Init from an interpolation name."""
        interpolation = INTERPOLATION_MODE_MAP[interpolation]
        super().__init__(*args, interpolation=interpolation, **kwargs)

class TrivialAugmentWide(T.TrivialAugmentWide):
    """Extend PyTorch's TrivialAugmentWide to init from an interpolation name."""

    def __init__(self, interpolation: str = "bilinear", *args, **kwargs) -> None:
        """Init from an interpolation name."""
        interpolation = INTERPOLATION_MODE_MAP[interpolation]
        super().__init__(*args, interpolation=interpolation, **kwargs)

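# Illustrative usage (a sketch, not part of the pipeline below): the wrappers
# above let a YAML config pass plain strings, with any extra keyword arguments
# forwarded unchanged to the torchvision base classes, e.g.
#
#   AutoAugment(policy="cifar10", interpolation="nearest")
#   RandAugment(interpolation="bicubic", num_ops=2, magnitude=9)
#   TrivialAugmentWide(interpolation="bilinear")
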
# Transformations are composed according to the order in this dict, not the
# order in the YAML config.
TRANSFORMATION_TO_NAME = OrderedDict(
    [
        ("resize", T.Resize),
        ("center_crop", T.CenterCrop),
        ("random_crop", T.RandomCrop),
        ("random_resized_crop", T.RandomResizedCrop),
        ("random_horizontal_flip", T.RandomHorizontalFlip),
        ("rand_augment", RandAugment),
        ("auto_augment", AutoAugment),
        ("trivial_augment_wide", TrivialAugmentWide),
        ("to_tensor", T.ToTensor),
        ("random_erase", T.RandomErasing),
        ("normalize", T.Normalize),
    ]
)

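# Sketch of how this mapping is presumably consumed by ``compose_from_config``
# below (illustrative only; the actual builder may differ):
#
#   cfg = clean_config(config_tr)
#   transforms = [
#       TRANSFORMATION_TO_NAME[name](**cfg[name])
#       for name in TRANSFORMATION_TO_NAME
#       if name in cfg
#   ]
#
# Iterating over TRANSFORMATION_TO_NAME rather than the config is what enforces
# the fixed composition order noted above.
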
def timm_resize_crop_norm(config: Dict[str, Any]) -> Dict[str, Any]:
    """Set Resize/RandomCrop/Normalize parameters from the pretrained config of a Timm teacher."""
    teacher_name = config["timm_resize_crop_norm"]["name"]
    cfg = timm.models.get_pretrained_cfg(teacher_name).to_dict()
    if "test_input_size" in cfg:
        img_size = list(cfg["test_input_size"])[-1]
    else:
        img_size = list(cfg["input_size"])[-1]
    # Crop ratio and image size for optimal performance of a Timm model
    crop_pct = cfg["crop_pct"]
    scale_size = int(math.floor(img_size / crop_pct))
    interpolation = cfg["interpolation"]
    config["resize"] = {
        "size": scale_size,
        "interpolation": str_to_interp_mode(interpolation),
    }
    config["random_crop"] = {
        "size": img_size,
        "pad_if_needed": True,
    }
    config["normalize"] = {"mean": cfg["mean"], "std": cfg["std"]}
    return config

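# Worked example (illustrative): a teacher with input_size (3, 224, 224),
# crop_pct 0.875 and bicubic interpolation yields a resize size of
# floor(224 / 0.875) = 256 and a random_crop size of 224, i.e. the usual
# 256 -> 224 resize-then-crop preprocessing.
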
def clean_config(config: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """Return a copy of the config, dropping disabled transforms and ``enable`` keys."""
    new_config = {}
    for k, v in config.items():
        vv = dict(v)
        if vv.pop("enable", True):
            new_config[k] = vv
    return new_config

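# Example (illustrative):
#   clean_config({"resize": {"enable": True, "size": 256},
#                 "random_crop": {"enable": False, "size": 224}})
#   -> {"resize": {"size": 256}}
# Disabled transforms are dropped and the "enable" flag is stripped from the rest.
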
def compose_from_config(config_tr: Dict[str, Any]) -> torch.nn.Module: