partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
train
CycleGANTrainer.on_epoch_end
Put the various losses in the recorder.
fastai/vision/cyclegan.py
def on_epoch_end(self, last_metrics, **kwargs): "Put the various losses in the recorder." return add_metrics(last_metrics, [s.smooth for k,s in self.smootheners.items()])
def on_epoch_end(self, last_metrics, **kwargs): "Put the various losses in the recorder." return add_metrics(last_metrics, [s.smooth for k,s in self.smootheners.items()])
[ "Put", "the", "various", "losses", "in", "the", "recorder", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/cyclegan.py#L183-L185
[ "def", "on_epoch_end", "(", "self", ",", "last_metrics", ",", "*", "*", "kwargs", ")", ":", "return", "add_metrics", "(", "last_metrics", ",", "[", "s", ".", "smooth", "for", "k", ",", "s", "in", "self", ".", "smootheners", ".", "items", "(", ")", "]", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
CSVLogger.on_train_begin
Prepare file with metric names.
fastai/callbacks/csv_logger.py
def on_train_begin(self, **kwargs: Any) -> None: "Prepare file with metric names." self.path.parent.mkdir(parents=True, exist_ok=True) self.file = self.path.open('a') if self.append else self.path.open('w') self.file.write(','.join(self.learn.recorder.names[:(None if self.add_time else -1)]) + '\n')
def on_train_begin(self, **kwargs: Any) -> None: "Prepare file with metric names." self.path.parent.mkdir(parents=True, exist_ok=True) self.file = self.path.open('a') if self.append else self.path.open('w') self.file.write(','.join(self.learn.recorder.names[:(None if self.add_time else -1)]) + '\n')
[ "Prepare", "file", "with", "metric", "names", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/csv_logger.py#L23-L27
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "path", ".", "parent", ".", "mkdir", "(", "parents", "=", "True", ",", "exist_ok", "=", "True", ")", "self", ".", "file", "=", "self", ".", "path", ".", "open", "(", "'a'", ")", "if", "self", ".", "append", "else", "self", ".", "path", ".", "open", "(", "'w'", ")", "self", ".", "file", ".", "write", "(", "','", ".", "join", "(", "self", ".", "learn", ".", "recorder", ".", "names", "[", ":", "(", "None", "if", "self", ".", "add_time", "else", "-", "1", ")", "]", ")", "+", "'\\n'", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
CSVLogger.on_epoch_end
Add a line with `epoch` number, `smooth_loss` and `last_metrics`.
fastai/callbacks/csv_logger.py
def on_epoch_end(self, epoch: int, smooth_loss: Tensor, last_metrics: MetricsList, **kwargs: Any) -> bool: "Add a line with `epoch` number, `smooth_loss` and `last_metrics`." last_metrics = ifnone(last_metrics, []) stats = [str(stat) if isinstance(stat, int) else '#na#' if stat is None else f'{stat:.6f}' for name, stat in zip(self.learn.recorder.names, [epoch, smooth_loss] + last_metrics)] if self.add_time: stats.append(format_time(time() - self.start_epoch)) str_stats = ','.join(stats) self.file.write(str_stats + '\n')
def on_epoch_end(self, epoch: int, smooth_loss: Tensor, last_metrics: MetricsList, **kwargs: Any) -> bool: "Add a line with `epoch` number, `smooth_loss` and `last_metrics`." last_metrics = ifnone(last_metrics, []) stats = [str(stat) if isinstance(stat, int) else '#na#' if stat is None else f'{stat:.6f}' for name, stat in zip(self.learn.recorder.names, [epoch, smooth_loss] + last_metrics)] if self.add_time: stats.append(format_time(time() - self.start_epoch)) str_stats = ','.join(stats) self.file.write(str_stats + '\n')
[ "Add", "a", "line", "with", "epoch", "number", "smooth_loss", "and", "last_metrics", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/csv_logger.py#L32-L39
[ "def", "on_epoch_end", "(", "self", ",", "epoch", ":", "int", ",", "smooth_loss", ":", "Tensor", ",", "last_metrics", ":", "MetricsList", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "bool", ":", "last_metrics", "=", "ifnone", "(", "last_metrics", ",", "[", "]", ")", "stats", "=", "[", "str", "(", "stat", ")", "if", "isinstance", "(", "stat", ",", "int", ")", "else", "'#na#'", "if", "stat", "is", "None", "else", "f'{stat:.6f}'", "for", "name", ",", "stat", "in", "zip", "(", "self", ".", "learn", ".", "recorder", ".", "names", ",", "[", "epoch", ",", "smooth_loss", "]", "+", "last_metrics", ")", "]", "if", "self", ".", "add_time", ":", "stats", ".", "append", "(", "format_time", "(", "time", "(", ")", "-", "self", ".", "start_epoch", ")", ")", "str_stats", "=", "','", ".", "join", "(", "stats", ")", "self", ".", "file", ".", "write", "(", "str_stats", "+", "'\\n'", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
get_master
Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32.
fastai/callbacks/fp16.py
def get_master(layer_groups:ModuleList, flat_master:bool=False) -> Tuple[List[List[Tensor]], List[List[Tensor]]]: "Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32." split_params = split_no_wd_params(layer_groups) model_params = [[param for param in pg if param.requires_grad] for pg in split_params] if flat_master: master_params = [] for lg in model_params: if len(lg) !=0 : mp = parameters_to_vector([param.data.float() for param in lg]) mp = torch.nn.Parameter(mp, requires_grad=True) if mp.grad is None: mp.grad = mp.new(*mp.size()) master_params.append([mp]) else: master_params.append([]) return model_params, master_params else: master_params = [[param.clone().float().detach() for param in lg] for lg in model_params] for mp in master_params: for param in mp: param.requires_grad = True return model_params, master_params
def get_master(layer_groups:ModuleList, flat_master:bool=False) -> Tuple[List[List[Tensor]], List[List[Tensor]]]: "Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32." split_params = split_no_wd_params(layer_groups) model_params = [[param for param in pg if param.requires_grad] for pg in split_params] if flat_master: master_params = [] for lg in model_params: if len(lg) !=0 : mp = parameters_to_vector([param.data.float() for param in lg]) mp = torch.nn.Parameter(mp, requires_grad=True) if mp.grad is None: mp.grad = mp.new(*mp.size()) master_params.append([mp]) else: master_params.append([]) return model_params, master_params else: master_params = [[param.clone().float().detach() for param in lg] for lg in model_params] for mp in master_params: for param in mp: param.requires_grad = True return model_params, master_params
[ "Return", "two", "lists", "one", "for", "the", "model", "parameters", "in", "FP16", "and", "one", "for", "the", "master", "parameters", "in", "FP32", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L10-L28
[ "def", "get_master", "(", "layer_groups", ":", "ModuleList", ",", "flat_master", ":", "bool", "=", "False", ")", "->", "Tuple", "[", "List", "[", "List", "[", "Tensor", "]", "]", ",", "List", "[", "List", "[", "Tensor", "]", "]", "]", ":", "split_params", "=", "split_no_wd_params", "(", "layer_groups", ")", "model_params", "=", "[", "[", "param", "for", "param", "in", "pg", "if", "param", ".", "requires_grad", "]", "for", "pg", "in", "split_params", "]", "if", "flat_master", ":", "master_params", "=", "[", "]", "for", "lg", "in", "model_params", ":", "if", "len", "(", "lg", ")", "!=", "0", ":", "mp", "=", "parameters_to_vector", "(", "[", "param", ".", "data", ".", "float", "(", ")", "for", "param", "in", "lg", "]", ")", "mp", "=", "torch", ".", "nn", ".", "Parameter", "(", "mp", ",", "requires_grad", "=", "True", ")", "if", "mp", ".", "grad", "is", "None", ":", "mp", ".", "grad", "=", "mp", ".", "new", "(", "*", "mp", ".", "size", "(", ")", ")", "master_params", ".", "append", "(", "[", "mp", "]", ")", "else", ":", "master_params", ".", "append", "(", "[", "]", ")", "return", "model_params", ",", "master_params", "else", ":", "master_params", "=", "[", "[", "param", ".", "clone", "(", ")", ".", "float", "(", ")", ".", "detach", "(", ")", "for", "param", "in", "lg", "]", "for", "lg", "in", "model_params", "]", "for", "mp", "in", "master_params", ":", "for", "param", "in", "mp", ":", "param", ".", "requires_grad", "=", "True", "return", "model_params", ",", "master_params" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
model_g2master_g
Copy the `model_params` gradients to `master_params` for the optimizer step.
fastai/callbacks/fp16.py
def model_g2master_g(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None: "Copy the `model_params` gradients to `master_params` for the optimizer step." if flat_master: for model_group,master_group in zip(model_params,master_params): if len(master_group) != 0: if master_group[0].grad is None: master_group[0].grad = master_group[0].data.new(*master_group[0].data.size()) master_group[0].grad.data.copy_(parameters_to_vector([p.grad.data.float() for p in model_group])) else: for model_group,master_group in zip(model_params,master_params): for model, master in zip(model_group, master_group): if model.grad is not None: if master.grad is None: master.grad = master.data.new(*master.data.size()) master.grad.data.copy_(model.grad.data) else: master.grad = None
def model_g2master_g(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None: "Copy the `model_params` gradients to `master_params` for the optimizer step." if flat_master: for model_group,master_group in zip(model_params,master_params): if len(master_group) != 0: if master_group[0].grad is None: master_group[0].grad = master_group[0].data.new(*master_group[0].data.size()) master_group[0].grad.data.copy_(parameters_to_vector([p.grad.data.float() for p in model_group])) else: for model_group,master_group in zip(model_params,master_params): for model, master in zip(model_group, master_group): if model.grad is not None: if master.grad is None: master.grad = master.data.new(*master.data.size()) master.grad.data.copy_(model.grad.data) else: master.grad = None
[ "Copy", "the", "model_params", "gradients", "to", "master_params", "for", "the", "optimizer", "step", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L30-L43
[ "def", "model_g2master_g", "(", "model_params", ":", "Sequence", "[", "Tensor", "]", ",", "master_params", ":", "Sequence", "[", "Tensor", "]", ",", "flat_master", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "flat_master", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "if", "len", "(", "master_group", ")", "!=", "0", ":", "if", "master_group", "[", "0", "]", ".", "grad", "is", "None", ":", "master_group", "[", "0", "]", ".", "grad", "=", "master_group", "[", "0", "]", ".", "data", ".", "new", "(", "*", "master_group", "[", "0", "]", ".", "data", ".", "size", "(", ")", ")", "master_group", "[", "0", "]", ".", "grad", ".", "data", ".", "copy_", "(", "parameters_to_vector", "(", "[", "p", ".", "grad", ".", "data", ".", "float", "(", ")", "for", "p", "in", "model_group", "]", ")", ")", "else", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "for", "model", ",", "master", "in", "zip", "(", "model_group", ",", "master_group", ")", ":", "if", "model", ".", "grad", "is", "not", "None", ":", "if", "master", ".", "grad", "is", "None", ":", "master", ".", "grad", "=", "master", ".", "data", ".", "new", "(", "*", "master", ".", "data", ".", "size", "(", ")", ")", "master", ".", "grad", ".", "data", ".", "copy_", "(", "model", ".", "grad", ".", "data", ")", "else", ":", "master", ".", "grad", "=", "None" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
master2model
Copy `master_params` to `model_params`.
fastai/callbacks/fp16.py
def master2model(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None: "Copy `master_params` to `model_params`." if flat_master: for model_group,master_group in zip(model_params,master_params): if len(model_group) != 0: for model, master in zip(model_group, _unflatten_dense_tensors(master_group[0].data, model_group)): model.data.copy_(master) else: for model_group,master_group in zip(model_params,master_params): for model, master in zip(model_group, master_group): model.data.copy_(master.data)
def master2model(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None: "Copy `master_params` to `model_params`." if flat_master: for model_group,master_group in zip(model_params,master_params): if len(model_group) != 0: for model, master in zip(model_group, _unflatten_dense_tensors(master_group[0].data, model_group)): model.data.copy_(master) else: for model_group,master_group in zip(model_params,master_params): for model, master in zip(model_group, master_group): model.data.copy_(master.data)
[ "Copy", "master_params", "to", "model_params", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L45-L54
[ "def", "master2model", "(", "model_params", ":", "Sequence", "[", "Tensor", "]", ",", "master_params", ":", "Sequence", "[", "Tensor", "]", ",", "flat_master", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "flat_master", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "if", "len", "(", "model_group", ")", "!=", "0", ":", "for", "model", ",", "master", "in", "zip", "(", "model_group", ",", "_unflatten_dense_tensors", "(", "master_group", "[", "0", "]", ".", "data", ",", "model_group", ")", ")", ":", "model", ".", "data", ".", "copy_", "(", "master", ")", "else", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "for", "model", ",", "master", "in", "zip", "(", "model_group", ",", "master_group", ")", ":", "model", ".", "data", ".", "copy_", "(", "master", ".", "data", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
MixedPrecision.on_train_begin
Prepare the master model.
fastai/callbacks/fp16.py
def on_train_begin(self, **kwargs:Any)->None: "Prepare the master model." #Get a copy of the model params in FP32 self.model_params, self.master_params = get_master(self.learn.layer_groups, self.flat_master) #Changes the optimizer so that the optimization step is done in FP32. new_opt = self.learn.opt.new_with_params(self.master_params) if self.opt is not None: self.opt.lr,self.opt.wd = self.learn.opt.lr,self.learn.opt.wd new_opt.load_state_dict(self.opt) self.learn.opt.opt = new_opt.opt self.noskip = 0
def on_train_begin(self, **kwargs:Any)->None: "Prepare the master model." #Get a copy of the model params in FP32 self.model_params, self.master_params = get_master(self.learn.layer_groups, self.flat_master) #Changes the optimizer so that the optimization step is done in FP32. new_opt = self.learn.opt.new_with_params(self.master_params) if self.opt is not None: self.opt.lr,self.opt.wd = self.learn.opt.lr,self.learn.opt.wd new_opt.load_state_dict(self.opt) self.learn.opt.opt = new_opt.opt self.noskip = 0
[ "Prepare", "the", "master", "model", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L76-L86
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "#Get a copy of the model params in FP32", "self", ".", "model_params", ",", "self", ".", "master_params", "=", "get_master", "(", "self", ".", "learn", ".", "layer_groups", ",", "self", ".", "flat_master", ")", "#Changes the optimizer so that the optimization step is done in FP32.", "new_opt", "=", "self", ".", "learn", ".", "opt", ".", "new_with_params", "(", "self", ".", "master_params", ")", "if", "self", ".", "opt", "is", "not", "None", ":", "self", ".", "opt", ".", "lr", ",", "self", ".", "opt", ".", "wd", "=", "self", ".", "learn", ".", "opt", ".", "lr", ",", "self", ".", "learn", ".", "opt", ".", "wd", "new_opt", ".", "load_state_dict", "(", "self", ".", "opt", ")", "self", ".", "learn", ".", "opt", ".", "opt", "=", "new_opt", ".", "opt", "self", ".", "noskip", "=", "0" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
MixedPrecision.on_backward_begin
Scale gradients up by `self.loss_scale` to prevent underflow.
fastai/callbacks/fp16.py
def on_backward_begin(self, last_loss:Rank0Tensor, **kwargs:Any) -> Rank0Tensor: "Scale gradients up by `self.loss_scale` to prevent underflow." #To avoid gradient underflow, we scale the gradients ret_loss = last_loss * self.loss_scale return {'last_loss': ret_loss}
def on_backward_begin(self, last_loss:Rank0Tensor, **kwargs:Any) -> Rank0Tensor: "Scale gradients up by `self.loss_scale` to prevent underflow." #To avoid gradient underflow, we scale the gradients ret_loss = last_loss * self.loss_scale return {'last_loss': ret_loss}
[ "Scale", "gradients", "up", "by", "self", ".", "loss_scale", "to", "prevent", "underflow", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L92-L96
[ "def", "on_backward_begin", "(", "self", ",", "last_loss", ":", "Rank0Tensor", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Rank0Tensor", ":", "#To avoid gradient underflow, we scale the gradients", "ret_loss", "=", "last_loss", "*", "self", ".", "loss_scale", "return", "{", "'last_loss'", ":", "ret_loss", "}" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
MixedPrecision.on_backward_end
Convert the gradients back to FP32 and divide them by the scale.
fastai/callbacks/fp16.py
def on_backward_end(self, **kwargs:Any)->None: "Convert the gradients back to FP32 and divide them by the scale." if self.dynamic and grad_overflow(self.model_params) and self.loss_scale > 1: self.loss_scale /= 2 self.noskip = 0 #The step will be skipped since we don't update the master grads so they are all None or zero else: model_g2master_g(self.model_params, self.master_params, self.flat_master) for group in self.master_params: for param in group: if param.grad is not None: param.grad.div_(self.loss_scale) if self.clip is not None: for group in self.master_params: nn.utils.clip_grad_norm_(group, self.clip) if not self.dynamic: return self.noskip += 1 if self.noskip >= self.max_noskip and self.loss_scale < self.max_scale: self.loss_scale *= 2 self.noskip = 0
def on_backward_end(self, **kwargs:Any)->None: "Convert the gradients back to FP32 and divide them by the scale." if self.dynamic and grad_overflow(self.model_params) and self.loss_scale > 1: self.loss_scale /= 2 self.noskip = 0 #The step will be skipped since we don't update the master grads so they are all None or zero else: model_g2master_g(self.model_params, self.master_params, self.flat_master) for group in self.master_params: for param in group: if param.grad is not None: param.grad.div_(self.loss_scale) if self.clip is not None: for group in self.master_params: nn.utils.clip_grad_norm_(group, self.clip) if not self.dynamic: return self.noskip += 1 if self.noskip >= self.max_noskip and self.loss_scale < self.max_scale: self.loss_scale *= 2 self.noskip = 0
[ "Convert", "the", "gradients", "back", "to", "FP32", "and", "divide", "them", "by", "the", "scale", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L98-L115
[ "def", "on_backward_end", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "self", ".", "dynamic", "and", "grad_overflow", "(", "self", ".", "model_params", ")", "and", "self", ".", "loss_scale", ">", "1", ":", "self", ".", "loss_scale", "/=", "2", "self", ".", "noskip", "=", "0", "#The step will be skipped since we don't update the master grads so they are all None or zero", "else", ":", "model_g2master_g", "(", "self", ".", "model_params", ",", "self", ".", "master_params", ",", "self", ".", "flat_master", ")", "for", "group", "in", "self", ".", "master_params", ":", "for", "param", "in", "group", ":", "if", "param", ".", "grad", "is", "not", "None", ":", "param", ".", "grad", ".", "div_", "(", "self", ".", "loss_scale", ")", "if", "self", ".", "clip", "is", "not", "None", ":", "for", "group", "in", "self", ".", "master_params", ":", "nn", ".", "utils", ".", "clip_grad_norm_", "(", "group", ",", "self", ".", "clip", ")", "if", "not", "self", ".", "dynamic", ":", "return", "self", ".", "noskip", "+=", "1", "if", "self", ".", "noskip", ">=", "self", ".", "max_noskip", "and", "self", ".", "loss_scale", "<", "self", ".", "max_scale", ":", "self", ".", "loss_scale", "*=", "2", "self", ".", "noskip", "=", "0" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
MixedPrecision.on_step_end
Update the params from master to model and zero grad.
fastai/callbacks/fp16.py
def on_step_end(self, **kwargs:Any)->None: "Update the params from master to model and zero grad." #Zeros the gradients of the model since the optimizer is disconnected. self.learn.model.zero_grad() #Update the params from master to model. master2model(self.model_params, self.master_params, self.flat_master)
def on_step_end(self, **kwargs:Any)->None: "Update the params from master to model and zero grad." #Zeros the gradients of the model since the optimizer is disconnected. self.learn.model.zero_grad() #Update the params from master to model. master2model(self.model_params, self.master_params, self.flat_master)
[ "Update", "the", "params", "from", "master", "to", "model", "and", "zero", "grad", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L117-L122
[ "def", "on_step_end", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "#Zeros the gradients of the model since the optimizer is disconnected.", "self", ".", "learn", ".", "model", ".", "zero_grad", "(", ")", "#Update the params from master to model.", "master2model", "(", "self", ".", "model_params", ",", "self", ".", "master_params", ",", "self", ".", "flat_master", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
scale_min
Scale the image so that the smallest axis is of size targ. Arguments: im (array): image targ (int): target size
old/fastai/transforms.py
def scale_min(im, targ, interpolation=cv2.INTER_AREA): """ Scale the image so that the smallest axis is of size targ. Arguments: im (array): image targ (int): target size """ r,c,*_ = im.shape ratio = targ/min(r,c) sz = (scale_to(c, ratio, targ), scale_to(r, ratio, targ)) return cv2.resize(im, sz, interpolation=interpolation)
def scale_min(im, targ, interpolation=cv2.INTER_AREA): """ Scale the image so that the smallest axis is of size targ. Arguments: im (array): image targ (int): target size """ r,c,*_ = im.shape ratio = targ/min(r,c) sz = (scale_to(c, ratio, targ), scale_to(r, ratio, targ)) return cv2.resize(im, sz, interpolation=interpolation)
[ "Scale", "the", "image", "so", "that", "the", "smallest", "axis", "is", "of", "size", "targ", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L5-L15
[ "def", "scale_min", "(", "im", ",", "targ", ",", "interpolation", "=", "cv2", ".", "INTER_AREA", ")", ":", "r", ",", "c", ",", "", "*", "_", "=", "im", ".", "shape", "ratio", "=", "targ", "/", "min", "(", "r", ",", "c", ")", "sz", "=", "(", "scale_to", "(", "c", ",", "ratio", ",", "targ", ")", ",", "scale_to", "(", "r", ",", "ratio", ",", "targ", ")", ")", "return", "cv2", ".", "resize", "(", "im", ",", "sz", ",", "interpolation", "=", "interpolation", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
zoom_cv
Zoom the center of image x by a factor of z+1 while retaining the original image size and proportion.
old/fastai/transforms.py
def zoom_cv(x,z): """ Zoom the center of image x by a factor of z+1 while retaining the original image size and proportion. """ if z==0: return x r,c,*_ = x.shape M = cv2.getRotationMatrix2D((c/2,r/2),0,z+1.) return cv2.warpAffine(x,M,(c,r))
def zoom_cv(x,z): """ Zoom the center of image x by a factor of z+1 while retaining the original image size and proportion. """ if z==0: return x r,c,*_ = x.shape M = cv2.getRotationMatrix2D((c/2,r/2),0,z+1.) return cv2.warpAffine(x,M,(c,r))
[ "Zoom", "the", "center", "of", "image", "x", "by", "a", "factor", "of", "z", "+", "1", "while", "retaining", "the", "original", "image", "size", "and", "proportion", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L17-L22
[ "def", "zoom_cv", "(", "x", ",", "z", ")", ":", "if", "z", "==", "0", ":", "return", "x", "r", ",", "c", ",", "", "*", "_", "=", "x", ".", "shape", "M", "=", "cv2", ".", "getRotationMatrix2D", "(", "(", "c", "/", "2", ",", "r", "/", "2", ")", ",", "0", ",", "z", "+", "1.", ")", "return", "cv2", ".", "warpAffine", "(", "x", ",", "M", ",", "(", "c", ",", "r", ")", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
stretch_cv
Stretches image x horizontally by sr+1, and vertically by sc+1 while retaining the original image size and proportion.
old/fastai/transforms.py
def stretch_cv(x,sr,sc,interpolation=cv2.INTER_AREA): """ Stretches image x horizontally by sr+1, and vertically by sc+1 while retaining the original image size and proportion. """ if sr==0 and sc==0: return x r,c,*_ = x.shape x = cv2.resize(x, None, fx=sr+1, fy=sc+1, interpolation=interpolation) nr,nc,*_ = x.shape cr = (nr-r)//2; cc = (nc-c)//2 return x[cr:r+cr, cc:c+cc]
def stretch_cv(x,sr,sc,interpolation=cv2.INTER_AREA): """ Stretches image x horizontally by sr+1, and vertically by sc+1 while retaining the original image size and proportion. """ if sr==0 and sc==0: return x r,c,*_ = x.shape x = cv2.resize(x, None, fx=sr+1, fy=sc+1, interpolation=interpolation) nr,nc,*_ = x.shape cr = (nr-r)//2; cc = (nc-c)//2 return x[cr:r+cr, cc:c+cc]
[ "Stretches", "image", "x", "horizontally", "by", "sr", "+", "1", "and", "vertically", "by", "sc", "+", "1", "while", "retaining", "the", "original", "image", "size", "and", "proportion", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L24-L31
[ "def", "stretch_cv", "(", "x", ",", "sr", ",", "sc", ",", "interpolation", "=", "cv2", ".", "INTER_AREA", ")", ":", "if", "sr", "==", "0", "and", "sc", "==", "0", ":", "return", "x", "r", ",", "c", ",", "", "*", "_", "=", "x", ".", "shape", "x", "=", "cv2", ".", "resize", "(", "x", ",", "None", ",", "fx", "=", "sr", "+", "1", ",", "fy", "=", "sc", "+", "1", ",", "interpolation", "=", "interpolation", ")", "nr", ",", "nc", ",", "", "*", "_", "=", "x", ".", "shape", "cr", "=", "(", "nr", "-", "r", ")", "//", "2", "cc", "=", "(", "nc", "-", "c", ")", "//", "2", "return", "x", "[", "cr", ":", "r", "+", "cr", ",", "cc", ":", "c", "+", "cc", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
dihedral
Perform any of 8 permutations of 90-degrees rotations or flips for image x.
old/fastai/transforms.py
def dihedral(x, dih): """ Perform any of 8 permutations of 90-degrees rotations or flips for image x. """ x = np.rot90(x, dih%4) return x if dih<4 else np.fliplr(x)
def dihedral(x, dih): """ Perform any of 8 permutations of 90-degrees rotations or flips for image x. """ x = np.rot90(x, dih%4) return x if dih<4 else np.fliplr(x)
[ "Perform", "any", "of", "8", "permutations", "of", "90", "-", "degrees", "rotations", "or", "flips", "for", "image", "x", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L33-L36
[ "def", "dihedral", "(", "x", ",", "dih", ")", ":", "x", "=", "np", ".", "rot90", "(", "x", ",", "dih", "%", "4", ")", "return", "x", "if", "dih", "<", "4", "else", "np", ".", "fliplr", "(", "x", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
lighting
Adjust image balance and contrast
old/fastai/transforms.py
def lighting(im, b, c): """ Adjust image balance and contrast """ if b==0 and c==1: return im mu = np.average(im) return np.clip((im-mu)*c+mu+b,0.,1.).astype(np.float32)
def lighting(im, b, c): """ Adjust image balance and contrast """ if b==0 and c==1: return im mu = np.average(im) return np.clip((im-mu)*c+mu+b,0.,1.).astype(np.float32)
[ "Adjust", "image", "balance", "and", "contrast" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L38-L42
[ "def", "lighting", "(", "im", ",", "b", ",", "c", ")", ":", "if", "b", "==", "0", "and", "c", "==", "1", ":", "return", "im", "mu", "=", "np", ".", "average", "(", "im", ")", "return", "np", ".", "clip", "(", "(", "im", "-", "mu", ")", "*", "c", "+", "mu", "+", "b", ",", "0.", ",", "1.", ")", ".", "astype", "(", "np", ".", "float32", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
no_crop
Return a squared resized image
old/fastai/transforms.py
def no_crop(im, min_sz=None, interpolation=cv2.INTER_AREA): """ Return a squared resized image """ r,c,*_ = im.shape if min_sz is None: min_sz = min(r,c) return cv2.resize(im, (min_sz, min_sz), interpolation=interpolation)
def no_crop(im, min_sz=None, interpolation=cv2.INTER_AREA): """ Return a squared resized image """ r,c,*_ = im.shape if min_sz is None: min_sz = min(r,c) return cv2.resize(im, (min_sz, min_sz), interpolation=interpolation)
[ "Return", "a", "squared", "resized", "image" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L54-L58
[ "def", "no_crop", "(", "im", ",", "min_sz", "=", "None", ",", "interpolation", "=", "cv2", ".", "INTER_AREA", ")", ":", "r", ",", "c", ",", "", "*", "_", "=", "im", ".", "shape", "if", "min_sz", "is", "None", ":", "min_sz", "=", "min", "(", "r", ",", "c", ")", "return", "cv2", ".", "resize", "(", "im", ",", "(", "min_sz", ",", "min_sz", ")", ",", "interpolation", "=", "interpolation", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
center_crop
Return a center crop of an image
old/fastai/transforms.py
def center_crop(im, min_sz=None): """ Return a center crop of an image """ r,c,*_ = im.shape if min_sz is None: min_sz = min(r,c) start_r = math.ceil((r-min_sz)/2) start_c = math.ceil((c-min_sz)/2) return crop(im, start_r, start_c, min_sz)
def center_crop(im, min_sz=None): """ Return a center crop of an image """ r,c,*_ = im.shape if min_sz is None: min_sz = min(r,c) start_r = math.ceil((r-min_sz)/2) start_c = math.ceil((c-min_sz)/2) return crop(im, start_r, start_c, min_sz)
[ "Return", "a", "center", "crop", "of", "an", "image" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L60-L66
[ "def", "center_crop", "(", "im", ",", "min_sz", "=", "None", ")", ":", "r", ",", "c", ",", "", "*", "_", "=", "im", ".", "shape", "if", "min_sz", "is", "None", ":", "min_sz", "=", "min", "(", "r", ",", "c", ")", "start_r", "=", "math", ".", "ceil", "(", "(", "r", "-", "min_sz", ")", "/", "2", ")", "start_c", "=", "math", ".", "ceil", "(", "(", "c", "-", "min_sz", ")", "/", "2", ")", "return", "crop", "(", "im", ",", "start_r", ",", "start_c", ",", "min_sz", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
googlenet_resize
Randomly crop an image with an aspect ratio and returns a squared resized image of size targ References: 1. https://arxiv.org/pdf/1409.4842.pdf 2. https://arxiv.org/pdf/1802.07888.pdf
old/fastai/transforms.py
def googlenet_resize(im, targ, min_area_frac, min_aspect_ratio, max_aspect_ratio, flip_hw_p, interpolation=cv2.INTER_AREA): """ Randomly crop an image with an aspect ratio and returns a squared resized image of size targ References: 1. https://arxiv.org/pdf/1409.4842.pdf 2. https://arxiv.org/pdf/1802.07888.pdf """ h,w,*_ = im.shape area = h*w for _ in range(10): targetArea = random.uniform(min_area_frac, 1.0) * area aspectR = random.uniform(min_aspect_ratio, max_aspect_ratio) ww = int(np.sqrt(targetArea * aspectR) + 0.5) hh = int(np.sqrt(targetArea / aspectR) + 0.5) if flip_hw_p: ww, hh = hh, ww if hh <= h and ww <= w: x1 = 0 if w == ww else random.randint(0, w - ww) y1 = 0 if h == hh else random.randint(0, h - hh) out = im[y1:y1 + hh, x1:x1 + ww] out = cv2.resize(out, (targ, targ), interpolation=interpolation) return out out = scale_min(im, targ, interpolation=interpolation) out = center_crop(out) return out
def googlenet_resize(im, targ, min_area_frac, min_aspect_ratio, max_aspect_ratio, flip_hw_p, interpolation=cv2.INTER_AREA): """ Randomly crop an image with an aspect ratio and returns a squared resized image of size targ References: 1. https://arxiv.org/pdf/1409.4842.pdf 2. https://arxiv.org/pdf/1802.07888.pdf """ h,w,*_ = im.shape area = h*w for _ in range(10): targetArea = random.uniform(min_area_frac, 1.0) * area aspectR = random.uniform(min_aspect_ratio, max_aspect_ratio) ww = int(np.sqrt(targetArea * aspectR) + 0.5) hh = int(np.sqrt(targetArea / aspectR) + 0.5) if flip_hw_p: ww, hh = hh, ww if hh <= h and ww <= w: x1 = 0 if w == ww else random.randint(0, w - ww) y1 = 0 if h == hh else random.randint(0, h - hh) out = im[y1:y1 + hh, x1:x1 + ww] out = cv2.resize(out, (targ, targ), interpolation=interpolation) return out out = scale_min(im, targ, interpolation=interpolation) out = center_crop(out) return out
[ "Randomly", "crop", "an", "image", "with", "an", "aspect", "ratio", "and", "returns", "a", "squared", "resized", "image", "of", "size", "targ", "References", ":", "1", ".", "https", ":", "//", "arxiv", ".", "org", "/", "pdf", "/", "1409", ".", "4842", ".", "pdf", "2", ".", "https", ":", "//", "arxiv", ".", "org", "/", "pdf", "/", "1802", ".", "07888", ".", "pdf" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L68-L92
[ "def", "googlenet_resize", "(", "im", ",", "targ", ",", "min_area_frac", ",", "min_aspect_ratio", ",", "max_aspect_ratio", ",", "flip_hw_p", ",", "interpolation", "=", "cv2", ".", "INTER_AREA", ")", ":", "h", ",", "w", ",", "", "*", "_", "=", "im", ".", "shape", "area", "=", "h", "*", "w", "for", "_", "in", "range", "(", "10", ")", ":", "targetArea", "=", "random", ".", "uniform", "(", "min_area_frac", ",", "1.0", ")", "*", "area", "aspectR", "=", "random", ".", "uniform", "(", "min_aspect_ratio", ",", "max_aspect_ratio", ")", "ww", "=", "int", "(", "np", ".", "sqrt", "(", "targetArea", "*", "aspectR", ")", "+", "0.5", ")", "hh", "=", "int", "(", "np", ".", "sqrt", "(", "targetArea", "/", "aspectR", ")", "+", "0.5", ")", "if", "flip_hw_p", ":", "ww", ",", "hh", "=", "hh", ",", "ww", "if", "hh", "<=", "h", "and", "ww", "<=", "w", ":", "x1", "=", "0", "if", "w", "==", "ww", "else", "random", ".", "randint", "(", "0", ",", "w", "-", "ww", ")", "y1", "=", "0", "if", "h", "==", "hh", "else", "random", ".", "randint", "(", "0", ",", "h", "-", "hh", ")", "out", "=", "im", "[", "y1", ":", "y1", "+", "hh", ",", "x1", ":", "x1", "+", "ww", "]", "out", "=", "cv2", ".", "resize", "(", "out", ",", "(", "targ", ",", "targ", ")", ",", "interpolation", "=", "interpolation", ")", "return", "out", "out", "=", "scale_min", "(", "im", ",", "targ", ",", "interpolation", "=", "interpolation", ")", "out", "=", "center_crop", "(", "out", ")", "return", "out" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
cutout
Cut out n_holes number of square holes of size length in image at random locations. Holes may overlap.
old/fastai/transforms.py
def cutout(im, n_holes, length): """ Cut out n_holes number of square holes of size length in image at random locations. Holes may overlap. """ r,c,*_ = im.shape mask = np.ones((r, c), np.int32) for n in range(n_holes): y = np.random.randint(0, r) x = np.random.randint(0, c) y1 = int(np.clip(y - length / 2, 0, r)) y2 = int(np.clip(y + length / 2, 0, r)) x1 = int(np.clip(x - length / 2, 0, c)) x2 = int(np.clip(x + length / 2, 0, c)) mask[y1: y2, x1: x2] = 0. mask = mask[:,:,None] im = im * mask return im
def cutout(im, n_holes, length): """ Cut out n_holes number of square holes of size length in image at random locations. Holes may overlap. """ r,c,*_ = im.shape mask = np.ones((r, c), np.int32) for n in range(n_holes): y = np.random.randint(0, r) x = np.random.randint(0, c) y1 = int(np.clip(y - length / 2, 0, r)) y2 = int(np.clip(y + length / 2, 0, r)) x1 = int(np.clip(x - length / 2, 0, c)) x2 = int(np.clip(x + length / 2, 0, c)) mask[y1: y2, x1: x2] = 0. mask = mask[:,:,None] im = im * mask return im
[ "Cut", "out", "n_holes", "number", "of", "square", "holes", "of", "size", "length", "in", "image", "at", "random", "locations", ".", "Holes", "may", "overlap", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L94-L110
[ "def", "cutout", "(", "im", ",", "n_holes", ",", "length", ")", ":", "r", ",", "c", ",", "", "*", "_", "=", "im", ".", "shape", "mask", "=", "np", ".", "ones", "(", "(", "r", ",", "c", ")", ",", "np", ".", "int32", ")", "for", "n", "in", "range", "(", "n_holes", ")", ":", "y", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "r", ")", "x", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "c", ")", "y1", "=", "int", "(", "np", ".", "clip", "(", "y", "-", "length", "/", "2", ",", "0", ",", "r", ")", ")", "y2", "=", "int", "(", "np", ".", "clip", "(", "y", "+", "length", "/", "2", ",", "0", ",", "r", ")", ")", "x1", "=", "int", "(", "np", ".", "clip", "(", "x", "-", "length", "/", "2", ",", "0", ",", "c", ")", ")", "x2", "=", "int", "(", "np", ".", "clip", "(", "x", "+", "length", "/", "2", ",", "0", ",", "c", ")", ")", "mask", "[", "y1", ":", "y2", ",", "x1", ":", "x2", "]", "=", "0.", "mask", "=", "mask", "[", ":", ",", ":", ",", "None", "]", "im", "=", "im", "*", "mask", "return", "im" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
scale_to
Calculate dimension of an image during scaling with aspect ratio
old/fastai/transforms.py
def scale_to(x, ratio, targ): '''Calculate dimension of an image during scaling with aspect ratio''' return max(math.floor(x*ratio), targ)
def scale_to(x, ratio, targ): '''Calculate dimension of an image during scaling with aspect ratio''' return max(math.floor(x*ratio), targ)
[ "Calculate", "dimension", "of", "an", "image", "during", "scaling", "with", "aspect", "ratio" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L112-L114
[ "def", "scale_to", "(", "x", ",", "ratio", ",", "targ", ")", ":", "return", "max", "(", "math", ".", "floor", "(", "x", "*", "ratio", ")", ",", "targ", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
crop
crop image into a square of size sz,
old/fastai/transforms.py
def crop(im, r, c, sz): ''' crop image into a square of size sz, ''' return im[r:r+sz, c:c+sz]
def crop(im, r, c, sz): ''' crop image into a square of size sz, ''' return im[r:r+sz, c:c+sz]
[ "crop", "image", "into", "a", "square", "of", "size", "sz" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L116-L120
[ "def", "crop", "(", "im", ",", "r", ",", "c", ",", "sz", ")", ":", "return", "im", "[", "r", ":", "r", "+", "sz", ",", "c", ":", "c", "+", "sz", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
to_bb
Convert mask YY to a bounding box, assumes 0 as background nonzero object
old/fastai/transforms.py
def to_bb(YY, y="deprecated"): """Convert mask YY to a bounding box, assumes 0 as background nonzero object""" cols,rows = np.nonzero(YY) if len(cols)==0: return np.zeros(4, dtype=np.float32) top_row = np.min(rows) left_col = np.min(cols) bottom_row = np.max(rows) right_col = np.max(cols) return np.array([left_col, top_row, right_col, bottom_row], dtype=np.float32)
def to_bb(YY, y="deprecated"): """Convert mask YY to a bounding box, assumes 0 as background nonzero object""" cols,rows = np.nonzero(YY) if len(cols)==0: return np.zeros(4, dtype=np.float32) top_row = np.min(rows) left_col = np.min(cols) bottom_row = np.max(rows) right_col = np.max(cols) return np.array([left_col, top_row, right_col, bottom_row], dtype=np.float32)
[ "Convert", "mask", "YY", "to", "a", "bounding", "box", "assumes", "0", "as", "background", "nonzero", "object" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L183-L191
[ "def", "to_bb", "(", "YY", ",", "y", "=", "\"deprecated\"", ")", ":", "cols", ",", "rows", "=", "np", ".", "nonzero", "(", "YY", ")", "if", "len", "(", "cols", ")", "==", "0", ":", "return", "np", ".", "zeros", "(", "4", ",", "dtype", "=", "np", ".", "float32", ")", "top_row", "=", "np", ".", "min", "(", "rows", ")", "left_col", "=", "np", ".", "min", "(", "cols", ")", "bottom_row", "=", "np", ".", "max", "(", "rows", ")", "right_col", "=", "np", ".", "max", "(", "cols", ")", "return", "np", ".", "array", "(", "[", "left_col", ",", "top_row", ",", "right_col", ",", "bottom_row", "]", ",", "dtype", "=", "np", ".", "float32", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
coords2px
Transforming coordinates to pixels. Arguments: y : np array vector in which (y[0], y[1]) and (y[2], y[3]) are the the corners of a bounding box. x : image an image Returns: Y : image of shape x.shape
old/fastai/transforms.py
def coords2px(y, x): """ Transforming coordinates to pixels. Arguments: y : np array vector in which (y[0], y[1]) and (y[2], y[3]) are the the corners of a bounding box. x : image an image Returns: Y : image of shape x.shape """ rows = np.rint([y[0], y[0], y[2], y[2]]).astype(int) cols = np.rint([y[1], y[3], y[1], y[3]]).astype(int) r,c,*_ = x.shape Y = np.zeros((r, c)) Y[rows, cols] = 1 return Y
def coords2px(y, x): """ Transforming coordinates to pixels. Arguments: y : np array vector in which (y[0], y[1]) and (y[2], y[3]) are the the corners of a bounding box. x : image an image Returns: Y : image of shape x.shape """ rows = np.rint([y[0], y[0], y[2], y[2]]).astype(int) cols = np.rint([y[1], y[3], y[1], y[3]]).astype(int) r,c,*_ = x.shape Y = np.zeros((r, c)) Y[rows, cols] = 1 return Y
[ "Transforming", "coordinates", "to", "pixels", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L194-L212
[ "def", "coords2px", "(", "y", ",", "x", ")", ":", "rows", "=", "np", ".", "rint", "(", "[", "y", "[", "0", "]", ",", "y", "[", "0", "]", ",", "y", "[", "2", "]", ",", "y", "[", "2", "]", "]", ")", ".", "astype", "(", "int", ")", "cols", "=", "np", ".", "rint", "(", "[", "y", "[", "1", "]", ",", "y", "[", "3", "]", ",", "y", "[", "1", "]", ",", "y", "[", "3", "]", "]", ")", ".", "astype", "(", "int", ")", "r", ",", "c", ",", "", "*", "_", "=", "x", ".", "shape", "Y", "=", "np", ".", "zeros", "(", "(", "r", ",", "c", ")", ")", "Y", "[", "rows", ",", "cols", "]", "=", "1", "return", "Y" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
compose
Apply a collection of transformation functions :fns: to images
old/fastai/transforms.py
def compose(im, y, fns): """ Apply a collection of transformation functions :fns: to images """ for fn in fns: #pdb.set_trace() im, y =fn(im, y) return im if y is None else (im, y)
def compose(im, y, fns): """ Apply a collection of transformation functions :fns: to images """ for fn in fns: #pdb.set_trace() im, y =fn(im, y) return im if y is None else (im, y)
[ "Apply", "a", "collection", "of", "transformation", "functions", ":", "fns", ":", "to", "images" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L619-L624
[ "def", "compose", "(", "im", ",", "y", ",", "fns", ")", ":", "for", "fn", "in", "fns", ":", "#pdb.set_trace()", "im", ",", "y", "=", "fn", "(", "im", ",", "y", ")", "return", "im", "if", "y", "is", "None", "else", "(", "im", ",", "y", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
image_gen
Generate a standard set of transformations Arguments --------- normalizer : image normalizing function denorm : image denormalizing function sz : size, sz_y = sz if not specified. tfms : iterable collection of transformation functions max_zoom : float, maximum zoom pad : int, padding on top, left, right and bottom crop_type : crop type tfm_y : y axis specific transformations sz_y : y size, height pad_mode : cv2 padding style: repeat, reflect, etc. Returns ------- type : ``Transforms`` transformer for specified image operations. See Also -------- Transforms: the transformer object returned by this function
old/fastai/transforms.py
def image_gen(normalizer, denorm, sz, tfms=None, max_zoom=None, pad=0, crop_type=None, tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, scale=None): """ Generate a standard set of transformations Arguments --------- normalizer : image normalizing function denorm : image denormalizing function sz : size, sz_y = sz if not specified. tfms : iterable collection of transformation functions max_zoom : float, maximum zoom pad : int, padding on top, left, right and bottom crop_type : crop type tfm_y : y axis specific transformations sz_y : y size, height pad_mode : cv2 padding style: repeat, reflect, etc. Returns ------- type : ``Transforms`` transformer for specified image operations. See Also -------- Transforms: the transformer object returned by this function """ if tfm_y is None: tfm_y=TfmType.NO if tfms is None: tfms=[] elif not isinstance(tfms, collections.Iterable): tfms=[tfms] if sz_y is None: sz_y = sz if scale is None: scale = [RandomScale(sz, max_zoom, tfm_y=tfm_y, sz_y=sz_y) if max_zoom is not None else Scale(sz, tfm_y, sz_y=sz_y)] elif not is_listy(scale): scale = [scale] if pad: scale.append(AddPadding(pad, mode=pad_mode)) if crop_type!=CropType.GOOGLENET: tfms=scale+tfms return Transforms(sz, tfms, normalizer, denorm, crop_type, tfm_y=tfm_y, sz_y=sz_y)
def image_gen(normalizer, denorm, sz, tfms=None, max_zoom=None, pad=0, crop_type=None, tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, scale=None): """ Generate a standard set of transformations Arguments --------- normalizer : image normalizing function denorm : image denormalizing function sz : size, sz_y = sz if not specified. tfms : iterable collection of transformation functions max_zoom : float, maximum zoom pad : int, padding on top, left, right and bottom crop_type : crop type tfm_y : y axis specific transformations sz_y : y size, height pad_mode : cv2 padding style: repeat, reflect, etc. Returns ------- type : ``Transforms`` transformer for specified image operations. See Also -------- Transforms: the transformer object returned by this function """ if tfm_y is None: tfm_y=TfmType.NO if tfms is None: tfms=[] elif not isinstance(tfms, collections.Iterable): tfms=[tfms] if sz_y is None: sz_y = sz if scale is None: scale = [RandomScale(sz, max_zoom, tfm_y=tfm_y, sz_y=sz_y) if max_zoom is not None else Scale(sz, tfm_y, sz_y=sz_y)] elif not is_listy(scale): scale = [scale] if pad: scale.append(AddPadding(pad, mode=pad_mode)) if crop_type!=CropType.GOOGLENET: tfms=scale+tfms return Transforms(sz, tfms, normalizer, denorm, crop_type, tfm_y=tfm_y, sz_y=sz_y)
[ "Generate", "a", "standard", "set", "of", "transformations" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L652-L700
[ "def", "image_gen", "(", "normalizer", ",", "denorm", ",", "sz", ",", "tfms", "=", "None", ",", "max_zoom", "=", "None", ",", "pad", "=", "0", ",", "crop_type", "=", "None", ",", "tfm_y", "=", "None", ",", "sz_y", "=", "None", ",", "pad_mode", "=", "cv2", ".", "BORDER_REFLECT", ",", "scale", "=", "None", ")", ":", "if", "tfm_y", "is", "None", ":", "tfm_y", "=", "TfmType", ".", "NO", "if", "tfms", "is", "None", ":", "tfms", "=", "[", "]", "elif", "not", "isinstance", "(", "tfms", ",", "collections", ".", "Iterable", ")", ":", "tfms", "=", "[", "tfms", "]", "if", "sz_y", "is", "None", ":", "sz_y", "=", "sz", "if", "scale", "is", "None", ":", "scale", "=", "[", "RandomScale", "(", "sz", ",", "max_zoom", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ")", "if", "max_zoom", "is", "not", "None", "else", "Scale", "(", "sz", ",", "tfm_y", ",", "sz_y", "=", "sz_y", ")", "]", "elif", "not", "is_listy", "(", "scale", ")", ":", "scale", "=", "[", "scale", "]", "if", "pad", ":", "scale", ".", "append", "(", "AddPadding", "(", "pad", ",", "mode", "=", "pad_mode", ")", ")", "if", "crop_type", "!=", "CropType", ".", "GOOGLENET", ":", "tfms", "=", "scale", "+", "tfms", "return", "Transforms", "(", "sz", ",", "tfms", ",", "normalizer", ",", "denorm", ",", "crop_type", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
tfms_from_stats
Given the statistics of the training image sets, returns separate training and validation transform functions
old/fastai/transforms.py
def tfms_from_stats(stats, sz, aug_tfms=None, max_zoom=None, pad=0, crop_type=CropType.RANDOM, tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, norm_y=True, scale=None): """ Given the statistics of the training image sets, returns separate training and validation transform functions """ if aug_tfms is None: aug_tfms=[] tfm_norm = Normalize(*stats, tfm_y=tfm_y if norm_y else TfmType.NO) if stats is not None else None tfm_denorm = Denormalize(*stats) if stats is not None else None val_crop = CropType.CENTER if crop_type in (CropType.RANDOM,CropType.GOOGLENET) else crop_type val_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=val_crop, tfm_y=tfm_y, sz_y=sz_y, scale=scale) trn_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=crop_type, tfm_y=tfm_y, sz_y=sz_y, tfms=aug_tfms, max_zoom=max_zoom, pad_mode=pad_mode, scale=scale) return trn_tfm, val_tfm
def tfms_from_stats(stats, sz, aug_tfms=None, max_zoom=None, pad=0, crop_type=CropType.RANDOM, tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, norm_y=True, scale=None): """ Given the statistics of the training image sets, returns separate training and validation transform functions """ if aug_tfms is None: aug_tfms=[] tfm_norm = Normalize(*stats, tfm_y=tfm_y if norm_y else TfmType.NO) if stats is not None else None tfm_denorm = Denormalize(*stats) if stats is not None else None val_crop = CropType.CENTER if crop_type in (CropType.RANDOM,CropType.GOOGLENET) else crop_type val_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=val_crop, tfm_y=tfm_y, sz_y=sz_y, scale=scale) trn_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=crop_type, tfm_y=tfm_y, sz_y=sz_y, tfms=aug_tfms, max_zoom=max_zoom, pad_mode=pad_mode, scale=scale) return trn_tfm, val_tfm
[ "Given", "the", "statistics", "of", "the", "training", "image", "sets", "returns", "separate", "training", "and", "validation", "transform", "functions" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L716-L728
[ "def", "tfms_from_stats", "(", "stats", ",", "sz", ",", "aug_tfms", "=", "None", ",", "max_zoom", "=", "None", ",", "pad", "=", "0", ",", "crop_type", "=", "CropType", ".", "RANDOM", ",", "tfm_y", "=", "None", ",", "sz_y", "=", "None", ",", "pad_mode", "=", "cv2", ".", "BORDER_REFLECT", ",", "norm_y", "=", "True", ",", "scale", "=", "None", ")", ":", "if", "aug_tfms", "is", "None", ":", "aug_tfms", "=", "[", "]", "tfm_norm", "=", "Normalize", "(", "*", "stats", ",", "tfm_y", "=", "tfm_y", "if", "norm_y", "else", "TfmType", ".", "NO", ")", "if", "stats", "is", "not", "None", "else", "None", "tfm_denorm", "=", "Denormalize", "(", "*", "stats", ")", "if", "stats", "is", "not", "None", "else", "None", "val_crop", "=", "CropType", ".", "CENTER", "if", "crop_type", "in", "(", "CropType", ".", "RANDOM", ",", "CropType", ".", "GOOGLENET", ")", "else", "crop_type", "val_tfm", "=", "image_gen", "(", "tfm_norm", ",", "tfm_denorm", ",", "sz", ",", "pad", "=", "pad", ",", "crop_type", "=", "val_crop", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ",", "scale", "=", "scale", ")", "trn_tfm", "=", "image_gen", "(", "tfm_norm", ",", "tfm_denorm", ",", "sz", ",", "pad", "=", "pad", ",", "crop_type", "=", "crop_type", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ",", "tfms", "=", "aug_tfms", ",", "max_zoom", "=", "max_zoom", ",", "pad_mode", "=", "pad_mode", ",", "scale", "=", "scale", ")", "return", "trn_tfm", ",", "val_tfm" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
tfms_from_model
Returns separate transformers of images for training and validation. Transformers are constructed according to the image statistics given by the model. (See tfms_from_stats) Arguments: f_model: model, pretrained or not pretrained
old/fastai/transforms.py
def tfms_from_model(f_model, sz, aug_tfms=None, max_zoom=None, pad=0, crop_type=CropType.RANDOM, tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, norm_y=True, scale=None): """ Returns separate transformers of images for training and validation. Transformers are constructed according to the image statistics given by the model. (See tfms_from_stats) Arguments: f_model: model, pretrained or not pretrained """ stats = inception_stats if f_model in inception_models else imagenet_stats return tfms_from_stats(stats, sz, aug_tfms, max_zoom=max_zoom, pad=pad, crop_type=crop_type, tfm_y=tfm_y, sz_y=sz_y, pad_mode=pad_mode, norm_y=norm_y, scale=scale)
def tfms_from_model(f_model, sz, aug_tfms=None, max_zoom=None, pad=0, crop_type=CropType.RANDOM, tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, norm_y=True, scale=None): """ Returns separate transformers of images for training and validation. Transformers are constructed according to the image statistics given by the model. (See tfms_from_stats) Arguments: f_model: model, pretrained or not pretrained """ stats = inception_stats if f_model in inception_models else imagenet_stats return tfms_from_stats(stats, sz, aug_tfms, max_zoom=max_zoom, pad=pad, crop_type=crop_type, tfm_y=tfm_y, sz_y=sz_y, pad_mode=pad_mode, norm_y=norm_y, scale=scale)
[ "Returns", "separate", "transformers", "of", "images", "for", "training", "and", "validation", ".", "Transformers", "are", "constructed", "according", "to", "the", "image", "statistics", "given", "by", "the", "model", ".", "(", "See", "tfms_from_stats", ")" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L731-L741
[ "def", "tfms_from_model", "(", "f_model", ",", "sz", ",", "aug_tfms", "=", "None", ",", "max_zoom", "=", "None", ",", "pad", "=", "0", ",", "crop_type", "=", "CropType", ".", "RANDOM", ",", "tfm_y", "=", "None", ",", "sz_y", "=", "None", ",", "pad_mode", "=", "cv2", ".", "BORDER_REFLECT", ",", "norm_y", "=", "True", ",", "scale", "=", "None", ")", ":", "stats", "=", "inception_stats", "if", "f_model", "in", "inception_models", "else", "imagenet_stats", "return", "tfms_from_stats", "(", "stats", ",", "sz", ",", "aug_tfms", ",", "max_zoom", "=", "max_zoom", ",", "pad", "=", "pad", ",", "crop_type", "=", "crop_type", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ",", "pad_mode", "=", "pad_mode", ",", "norm_y", "=", "norm_y", ",", "scale", "=", "scale", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
get_image_files
Return list of files in `c` that are images. `check_ext` will filter to `image_extensions`.
fastai/vision/data.py
def get_image_files(c:PathOrStr, check_ext:bool=True, recurse=False)->FilePathList: "Return list of files in `c` that are images. `check_ext` will filter to `image_extensions`." return get_files(c, extensions=(image_extensions if check_ext else None), recurse=recurse)
def get_image_files(c:PathOrStr, check_ext:bool=True, recurse=False)->FilePathList: "Return list of files in `c` that are images. `check_ext` will filter to `image_extensions`." return get_files(c, extensions=(image_extensions if check_ext else None), recurse=recurse)
[ "Return", "list", "of", "files", "in", "c", "that", "are", "images", ".", "check_ext", "will", "filter", "to", "image_extensions", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L19-L21
[ "def", "get_image_files", "(", "c", ":", "PathOrStr", ",", "check_ext", ":", "bool", "=", "True", ",", "recurse", "=", "False", ")", "->", "FilePathList", ":", "return", "get_files", "(", "c", ",", "extensions", "=", "(", "image_extensions", "if", "check_ext", "else", "None", ")", ",", "recurse", "=", "recurse", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
get_annotations
Open a COCO style json in `fname` and returns the lists of filenames (with maybe `prefix`) and labelled bboxes.
fastai/vision/data.py
def get_annotations(fname, prefix=None): "Open a COCO style json in `fname` and returns the lists of filenames (with maybe `prefix`) and labelled bboxes." annot_dict = json.load(open(fname)) id2images, id2bboxes, id2cats = {}, collections.defaultdict(list), collections.defaultdict(list) classes = {} for o in annot_dict['categories']: classes[o['id']] = o['name'] for o in annot_dict['annotations']: bb = o['bbox'] id2bboxes[o['image_id']].append([bb[1],bb[0], bb[3]+bb[1], bb[2]+bb[0]]) id2cats[o['image_id']].append(classes[o['category_id']]) for o in annot_dict['images']: if o['id'] in id2bboxes: id2images[o['id']] = ifnone(prefix, '') + o['file_name'] ids = list(id2images.keys()) return [id2images[k] for k in ids], [[id2bboxes[k], id2cats[k]] for k in ids]
def get_annotations(fname, prefix=None): "Open a COCO style json in `fname` and returns the lists of filenames (with maybe `prefix`) and labelled bboxes." annot_dict = json.load(open(fname)) id2images, id2bboxes, id2cats = {}, collections.defaultdict(list), collections.defaultdict(list) classes = {} for o in annot_dict['categories']: classes[o['id']] = o['name'] for o in annot_dict['annotations']: bb = o['bbox'] id2bboxes[o['image_id']].append([bb[1],bb[0], bb[3]+bb[1], bb[2]+bb[0]]) id2cats[o['image_id']].append(classes[o['category_id']]) for o in annot_dict['images']: if o['id'] in id2bboxes: id2images[o['id']] = ifnone(prefix, '') + o['file_name'] ids = list(id2images.keys()) return [id2images[k] for k in ids], [[id2bboxes[k], id2cats[k]] for k in ids]
[ "Open", "a", "COCO", "style", "json", "in", "fname", "and", "returns", "the", "lists", "of", "filenames", "(", "with", "maybe", "prefix", ")", "and", "labelled", "bboxes", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L23-L38
[ "def", "get_annotations", "(", "fname", ",", "prefix", "=", "None", ")", ":", "annot_dict", "=", "json", ".", "load", "(", "open", "(", "fname", ")", ")", "id2images", ",", "id2bboxes", ",", "id2cats", "=", "{", "}", ",", "collections", ".", "defaultdict", "(", "list", ")", ",", "collections", ".", "defaultdict", "(", "list", ")", "classes", "=", "{", "}", "for", "o", "in", "annot_dict", "[", "'categories'", "]", ":", "classes", "[", "o", "[", "'id'", "]", "]", "=", "o", "[", "'name'", "]", "for", "o", "in", "annot_dict", "[", "'annotations'", "]", ":", "bb", "=", "o", "[", "'bbox'", "]", "id2bboxes", "[", "o", "[", "'image_id'", "]", "]", ".", "append", "(", "[", "bb", "[", "1", "]", ",", "bb", "[", "0", "]", ",", "bb", "[", "3", "]", "+", "bb", "[", "1", "]", ",", "bb", "[", "2", "]", "+", "bb", "[", "0", "]", "]", ")", "id2cats", "[", "o", "[", "'image_id'", "]", "]", ".", "append", "(", "classes", "[", "o", "[", "'category_id'", "]", "]", ")", "for", "o", "in", "annot_dict", "[", "'images'", "]", ":", "if", "o", "[", "'id'", "]", "in", "id2bboxes", ":", "id2images", "[", "o", "[", "'id'", "]", "]", "=", "ifnone", "(", "prefix", ",", "''", ")", "+", "o", "[", "'file_name'", "]", "ids", "=", "list", "(", "id2images", ".", "keys", "(", ")", ")", "return", "[", "id2images", "[", "k", "]", "for", "k", "in", "ids", "]", ",", "[", "[", "id2bboxes", "[", "k", "]", ",", "id2cats", "[", "k", "]", "]", "for", "k", "in", "ids", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
bb_pad_collate
Function that collect `samples` of labelled bboxes and adds padding with `pad_idx`.
fastai/vision/data.py
def bb_pad_collate(samples:BatchSamples, pad_idx:int=0) -> Tuple[FloatTensor, Tuple[LongTensor, LongTensor]]: "Function that collect `samples` of labelled bboxes and adds padding with `pad_idx`." if isinstance(samples[0][1], int): return data_collate(samples) max_len = max([len(s[1].data[1]) for s in samples]) bboxes = torch.zeros(len(samples), max_len, 4) labels = torch.zeros(len(samples), max_len).long() + pad_idx imgs = [] for i,s in enumerate(samples): imgs.append(s[0].data[None]) bbs, lbls = s[1].data if not (bbs.nelement() == 0): bboxes[i,-len(lbls):] = bbs labels[i,-len(lbls):] = tensor(lbls) return torch.cat(imgs,0), (bboxes,labels)
def bb_pad_collate(samples:BatchSamples, pad_idx:int=0) -> Tuple[FloatTensor, Tuple[LongTensor, LongTensor]]: "Function that collect `samples` of labelled bboxes and adds padding with `pad_idx`." if isinstance(samples[0][1], int): return data_collate(samples) max_len = max([len(s[1].data[1]) for s in samples]) bboxes = torch.zeros(len(samples), max_len, 4) labels = torch.zeros(len(samples), max_len).long() + pad_idx imgs = [] for i,s in enumerate(samples): imgs.append(s[0].data[None]) bbs, lbls = s[1].data if not (bbs.nelement() == 0): bboxes[i,-len(lbls):] = bbs labels[i,-len(lbls):] = tensor(lbls) return torch.cat(imgs,0), (bboxes,labels)
[ "Function", "that", "collect", "samples", "of", "labelled", "bboxes", "and", "adds", "padding", "with", "pad_idx", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L40-L53
[ "def", "bb_pad_collate", "(", "samples", ":", "BatchSamples", ",", "pad_idx", ":", "int", "=", "0", ")", "->", "Tuple", "[", "FloatTensor", ",", "Tuple", "[", "LongTensor", ",", "LongTensor", "]", "]", ":", "if", "isinstance", "(", "samples", "[", "0", "]", "[", "1", "]", ",", "int", ")", ":", "return", "data_collate", "(", "samples", ")", "max_len", "=", "max", "(", "[", "len", "(", "s", "[", "1", "]", ".", "data", "[", "1", "]", ")", "for", "s", "in", "samples", "]", ")", "bboxes", "=", "torch", ".", "zeros", "(", "len", "(", "samples", ")", ",", "max_len", ",", "4", ")", "labels", "=", "torch", ".", "zeros", "(", "len", "(", "samples", ")", ",", "max_len", ")", ".", "long", "(", ")", "+", "pad_idx", "imgs", "=", "[", "]", "for", "i", ",", "s", "in", "enumerate", "(", "samples", ")", ":", "imgs", ".", "append", "(", "s", "[", "0", "]", ".", "data", "[", "None", "]", ")", "bbs", ",", "lbls", "=", "s", "[", "1", "]", ".", "data", "if", "not", "(", "bbs", ".", "nelement", "(", ")", "==", "0", ")", ":", "bboxes", "[", "i", ",", "-", "len", "(", "lbls", ")", ":", "]", "=", "bbs", "labels", "[", "i", ",", "-", "len", "(", "lbls", ")", ":", "]", "=", "tensor", "(", "lbls", ")", "return", "torch", ".", "cat", "(", "imgs", ",", "0", ")", ",", "(", "bboxes", ",", "labels", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
normalize
Normalize `x` with `mean` and `std`.
fastai/vision/data.py
def normalize(x:TensorImage, mean:FloatTensor,std:FloatTensor)->TensorImage: "Normalize `x` with `mean` and `std`." return (x-mean[...,None,None]) / std[...,None,None]
def normalize(x:TensorImage, mean:FloatTensor,std:FloatTensor)->TensorImage: "Normalize `x` with `mean` and `std`." return (x-mean[...,None,None]) / std[...,None,None]
[ "Normalize", "x", "with", "mean", "and", "std", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L55-L57
[ "def", "normalize", "(", "x", ":", "TensorImage", ",", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ")", "->", "TensorImage", ":", "return", "(", "x", "-", "mean", "[", "...", ",", "None", ",", "None", "]", ")", "/", "std", "[", "...", ",", "None", ",", "None", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
denormalize
Denormalize `x` with `mean` and `std`.
fastai/vision/data.py
def denormalize(x:TensorImage, mean:FloatTensor,std:FloatTensor, do_x:bool=True)->TensorImage: "Denormalize `x` with `mean` and `std`." return x.cpu().float()*std[...,None,None] + mean[...,None,None] if do_x else x.cpu()
def denormalize(x:TensorImage, mean:FloatTensor,std:FloatTensor, do_x:bool=True)->TensorImage: "Denormalize `x` with `mean` and `std`." return x.cpu().float()*std[...,None,None] + mean[...,None,None] if do_x else x.cpu()
[ "Denormalize", "x", "with", "mean", "and", "std", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L59-L61
[ "def", "denormalize", "(", "x", ":", "TensorImage", ",", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ",", "do_x", ":", "bool", "=", "True", ")", "->", "TensorImage", ":", "return", "x", ".", "cpu", "(", ")", ".", "float", "(", ")", "*", "std", "[", "...", ",", "None", ",", "None", "]", "+", "mean", "[", "...", ",", "None", ",", "None", "]", "if", "do_x", "else", "x", ".", "cpu", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
_normalize_batch
`b` = `x`,`y` - normalize `x` array of imgs and `do_y` optionally `y`.
fastai/vision/data.py
def _normalize_batch(b:Tuple[Tensor,Tensor], mean:FloatTensor, std:FloatTensor, do_x:bool=True, do_y:bool=False)->Tuple[Tensor,Tensor]: "`b` = `x`,`y` - normalize `x` array of imgs and `do_y` optionally `y`." x,y = b mean,std = mean.to(x.device),std.to(x.device) if do_x: x = normalize(x,mean,std) if do_y and len(y.shape) == 4: y = normalize(y,mean,std) return x,y
def _normalize_batch(b:Tuple[Tensor,Tensor], mean:FloatTensor, std:FloatTensor, do_x:bool=True, do_y:bool=False)->Tuple[Tensor,Tensor]: "`b` = `x`,`y` - normalize `x` array of imgs and `do_y` optionally `y`." x,y = b mean,std = mean.to(x.device),std.to(x.device) if do_x: x = normalize(x,mean,std) if do_y and len(y.shape) == 4: y = normalize(y,mean,std) return x,y
[ "b", "=", "x", "y", "-", "normalize", "x", "array", "of", "imgs", "and", "do_y", "optionally", "y", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L63-L69
[ "def", "_normalize_batch", "(", "b", ":", "Tuple", "[", "Tensor", ",", "Tensor", "]", ",", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ",", "do_x", ":", "bool", "=", "True", ",", "do_y", ":", "bool", "=", "False", ")", "->", "Tuple", "[", "Tensor", ",", "Tensor", "]", ":", "x", ",", "y", "=", "b", "mean", ",", "std", "=", "mean", ".", "to", "(", "x", ".", "device", ")", ",", "std", ".", "to", "(", "x", ".", "device", ")", "if", "do_x", ":", "x", "=", "normalize", "(", "x", ",", "mean", ",", "std", ")", "if", "do_y", "and", "len", "(", "y", ".", "shape", ")", "==", "4", ":", "y", "=", "normalize", "(", "y", ",", "mean", ",", "std", ")", "return", "x", ",", "y" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
normalize_funcs
Create normalize/denormalize func using `mean` and `std`, can specify `do_y` and `device`.
fastai/vision/data.py
def normalize_funcs(mean:FloatTensor, std:FloatTensor, do_x:bool=True, do_y:bool=False)->Tuple[Callable,Callable]: "Create normalize/denormalize func using `mean` and `std`, can specify `do_y` and `device`." mean,std = tensor(mean),tensor(std) return (partial(_normalize_batch, mean=mean, std=std, do_x=do_x, do_y=do_y), partial(denormalize, mean=mean, std=std, do_x=do_x))
def normalize_funcs(mean:FloatTensor, std:FloatTensor, do_x:bool=True, do_y:bool=False)->Tuple[Callable,Callable]: "Create normalize/denormalize func using `mean` and `std`, can specify `do_y` and `device`." mean,std = tensor(mean),tensor(std) return (partial(_normalize_batch, mean=mean, std=std, do_x=do_x, do_y=do_y), partial(denormalize, mean=mean, std=std, do_x=do_x))
[ "Create", "normalize", "/", "denormalize", "func", "using", "mean", "and", "std", "can", "specify", "do_y", "and", "device", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L71-L75
[ "def", "normalize_funcs", "(", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ",", "do_x", ":", "bool", "=", "True", ",", "do_y", ":", "bool", "=", "False", ")", "->", "Tuple", "[", "Callable", ",", "Callable", "]", ":", "mean", ",", "std", "=", "tensor", "(", "mean", ")", ",", "tensor", "(", "std", ")", "return", "(", "partial", "(", "_normalize_batch", ",", "mean", "=", "mean", ",", "std", "=", "std", ",", "do_x", "=", "do_x", ",", "do_y", "=", "do_y", ")", ",", "partial", "(", "denormalize", ",", "mean", "=", "mean", ",", "std", "=", "std", ",", "do_x", "=", "do_x", ")", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
channel_view
Make channel the first axis of `x` and flatten remaining axes
fastai/vision/data.py
def channel_view(x:Tensor)->Tensor: "Make channel the first axis of `x` and flatten remaining axes" return x.transpose(0,1).contiguous().view(x.shape[1],-1)
def channel_view(x:Tensor)->Tensor: "Make channel the first axis of `x` and flatten remaining axes" return x.transpose(0,1).contiguous().view(x.shape[1],-1)
[ "Make", "channel", "the", "first", "axis", "of", "x", "and", "flatten", "remaining", "axes" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L81-L83
[ "def", "channel_view", "(", "x", ":", "Tensor", ")", "->", "Tensor", ":", "return", "x", ".", "transpose", "(", "0", ",", "1", ")", ".", "contiguous", "(", ")", ".", "view", "(", "x", ".", "shape", "[", "1", "]", ",", "-", "1", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
download_images
Download images listed in text file `urls` to path `dest`, at most `max_pics`
fastai/vision/data.py
def download_images(urls:Collection[str], dest:PathOrStr, max_pics:int=1000, max_workers:int=8, timeout=4): "Download images listed in text file `urls` to path `dest`, at most `max_pics`" urls = open(urls).read().strip().split("\n")[:max_pics] dest = Path(dest) dest.mkdir(exist_ok=True) parallel(partial(_download_image_inner, dest, timeout=timeout), urls, max_workers=max_workers)
def download_images(urls:Collection[str], dest:PathOrStr, max_pics:int=1000, max_workers:int=8, timeout=4): "Download images listed in text file `urls` to path `dest`, at most `max_pics`" urls = open(urls).read().strip().split("\n")[:max_pics] dest = Path(dest) dest.mkdir(exist_ok=True) parallel(partial(_download_image_inner, dest, timeout=timeout), urls, max_workers=max_workers)
[ "Download", "images", "listed", "in", "text", "file", "urls", "to", "path", "dest", "at", "most", "max_pics" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L191-L196
[ "def", "download_images", "(", "urls", ":", "Collection", "[", "str", "]", ",", "dest", ":", "PathOrStr", ",", "max_pics", ":", "int", "=", "1000", ",", "max_workers", ":", "int", "=", "8", ",", "timeout", "=", "4", ")", ":", "urls", "=", "open", "(", "urls", ")", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", "\"\\n\"", ")", "[", ":", "max_pics", "]", "dest", "=", "Path", "(", "dest", ")", "dest", ".", "mkdir", "(", "exist_ok", "=", "True", ")", "parallel", "(", "partial", "(", "_download_image_inner", ",", "dest", ",", "timeout", "=", "timeout", ")", ",", "urls", ",", "max_workers", "=", "max_workers", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
resize_to
Size to resize to, to hit `targ_sz` at same aspect ratio, in PIL coords (i.e w*h)
fastai/vision/data.py
def resize_to(img, targ_sz:int, use_min:bool=False): "Size to resize to, to hit `targ_sz` at same aspect ratio, in PIL coords (i.e w*h)" w,h = img.size min_sz = (min if use_min else max)(w,h) ratio = targ_sz/min_sz return int(w*ratio),int(h*ratio)
def resize_to(img, targ_sz:int, use_min:bool=False): "Size to resize to, to hit `targ_sz` at same aspect ratio, in PIL coords (i.e w*h)" w,h = img.size min_sz = (min if use_min else max)(w,h) ratio = targ_sz/min_sz return int(w*ratio),int(h*ratio)
[ "Size", "to", "resize", "to", "to", "hit", "targ_sz", "at", "same", "aspect", "ratio", "in", "PIL", "coords", "(", "i", ".", "e", "w", "*", "h", ")" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L198-L203
[ "def", "resize_to", "(", "img", ",", "targ_sz", ":", "int", ",", "use_min", ":", "bool", "=", "False", ")", ":", "w", ",", "h", "=", "img", ".", "size", "min_sz", "=", "(", "min", "if", "use_min", "else", "max", ")", "(", "w", ",", "h", ")", "ratio", "=", "targ_sz", "/", "min_sz", "return", "int", "(", "w", "*", "ratio", ")", ",", "int", "(", "h", "*", "ratio", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
verify_image
Check if the image in `file` exists, maybe resize it and copy it in `dest`.
fastai/vision/data.py
def verify_image(file:Path, idx:int, delete:bool, max_size:Union[int,Tuple[int,int]]=None, dest:Path=None, n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None, resume:bool=False, **kwargs): "Check if the image in `file` exists, maybe resize it and copy it in `dest`." try: # deal with partially broken images as indicated by PIL warnings with warnings.catch_warnings(): warnings.filterwarnings('error') try: with open(file, 'rb') as img_file: PIL.Image.open(img_file) except Warning as w: if "Possibly corrupt EXIF data" in str(w): if delete: # green light to modify files print(f"{file}: Removing corrupt EXIF data") warnings.simplefilter("ignore") # save EXIF-cleaned up image, which happens automatically PIL.Image.open(file).save(file) else: # keep user's files intact print(f"{file}: Not removing corrupt EXIF data, pass `delete=True` to do that") else: warnings.warn(w) img = PIL.Image.open(file) imgarr = np.array(img) img_channels = 1 if len(imgarr.shape) == 2 else imgarr.shape[2] if (max_size is not None and (img.height > max_size or img.width > max_size)) or img_channels != n_channels: assert isinstance(dest, Path), "You should provide `dest` Path to save resized image" dest_fname = dest/file.name if ext is not None: dest_fname=dest_fname.with_suffix(ext) if resume and os.path.isfile(dest_fname): return if max_size is not None: new_sz = resize_to(img, max_size) img = img.resize(new_sz, resample=interp) if n_channels == 3: img = img.convert("RGB") img.save(dest_fname, img_format, **kwargs) except Exception as e: print(f'{e}') if delete: file.unlink()
def verify_image(file:Path, idx:int, delete:bool, max_size:Union[int,Tuple[int,int]]=None, dest:Path=None, n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None, resume:bool=False, **kwargs): "Check if the image in `file` exists, maybe resize it and copy it in `dest`." try: # deal with partially broken images as indicated by PIL warnings with warnings.catch_warnings(): warnings.filterwarnings('error') try: with open(file, 'rb') as img_file: PIL.Image.open(img_file) except Warning as w: if "Possibly corrupt EXIF data" in str(w): if delete: # green light to modify files print(f"{file}: Removing corrupt EXIF data") warnings.simplefilter("ignore") # save EXIF-cleaned up image, which happens automatically PIL.Image.open(file).save(file) else: # keep user's files intact print(f"{file}: Not removing corrupt EXIF data, pass `delete=True` to do that") else: warnings.warn(w) img = PIL.Image.open(file) imgarr = np.array(img) img_channels = 1 if len(imgarr.shape) == 2 else imgarr.shape[2] if (max_size is not None and (img.height > max_size or img.width > max_size)) or img_channels != n_channels: assert isinstance(dest, Path), "You should provide `dest` Path to save resized image" dest_fname = dest/file.name if ext is not None: dest_fname=dest_fname.with_suffix(ext) if resume and os.path.isfile(dest_fname): return if max_size is not None: new_sz = resize_to(img, max_size) img = img.resize(new_sz, resample=interp) if n_channels == 3: img = img.convert("RGB") img.save(dest_fname, img_format, **kwargs) except Exception as e: print(f'{e}') if delete: file.unlink()
[ "Check", "if", "the", "image", "in", "file", "exists", "maybe", "resize", "it", "and", "copy", "it", "in", "dest", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L205-L240
[ "def", "verify_image", "(", "file", ":", "Path", ",", "idx", ":", "int", ",", "delete", ":", "bool", ",", "max_size", ":", "Union", "[", "int", ",", "Tuple", "[", "int", ",", "int", "]", "]", "=", "None", ",", "dest", ":", "Path", "=", "None", ",", "n_channels", ":", "int", "=", "3", ",", "interp", "=", "PIL", ".", "Image", ".", "BILINEAR", ",", "ext", ":", "str", "=", "None", ",", "img_format", ":", "str", "=", "None", ",", "resume", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ")", ":", "try", ":", "# deal with partially broken images as indicated by PIL warnings", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "'error'", ")", "try", ":", "with", "open", "(", "file", ",", "'rb'", ")", "as", "img_file", ":", "PIL", ".", "Image", ".", "open", "(", "img_file", ")", "except", "Warning", "as", "w", ":", "if", "\"Possibly corrupt EXIF data\"", "in", "str", "(", "w", ")", ":", "if", "delete", ":", "# green light to modify files", "print", "(", "f\"{file}: Removing corrupt EXIF data\"", ")", "warnings", ".", "simplefilter", "(", "\"ignore\"", ")", "# save EXIF-cleaned up image, which happens automatically", "PIL", ".", "Image", ".", "open", "(", "file", ")", ".", "save", "(", "file", ")", "else", ":", "# keep user's files intact", "print", "(", "f\"{file}: Not removing corrupt EXIF data, pass `delete=True` to do that\"", ")", "else", ":", "warnings", ".", "warn", "(", "w", ")", "img", "=", "PIL", ".", "Image", ".", "open", "(", "file", ")", "imgarr", "=", "np", ".", "array", "(", "img", ")", "img_channels", "=", "1", "if", "len", "(", "imgarr", ".", "shape", ")", "==", "2", "else", "imgarr", ".", "shape", "[", "2", "]", "if", "(", "max_size", "is", "not", "None", "and", "(", "img", ".", "height", ">", "max_size", "or", "img", ".", "width", ">", "max_size", ")", ")", "or", "img_channels", "!=", "n_channels", ":", "assert", "isinstance", "(", "dest", ",", "Path", ")", ",", "\"You should provide `dest` Path to save 
resized image\"", "dest_fname", "=", "dest", "/", "file", ".", "name", "if", "ext", "is", "not", "None", ":", "dest_fname", "=", "dest_fname", ".", "with_suffix", "(", "ext", ")", "if", "resume", "and", "os", ".", "path", ".", "isfile", "(", "dest_fname", ")", ":", "return", "if", "max_size", "is", "not", "None", ":", "new_sz", "=", "resize_to", "(", "img", ",", "max_size", ")", "img", "=", "img", ".", "resize", "(", "new_sz", ",", "resample", "=", "interp", ")", "if", "n_channels", "==", "3", ":", "img", "=", "img", ".", "convert", "(", "\"RGB\"", ")", "img", ".", "save", "(", "dest_fname", ",", "img_format", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "print", "(", "f'{e}'", ")", "if", "delete", ":", "file", ".", "unlink", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
verify_images
Check if the images in `path` aren't broken, maybe resize them and copy it in `dest`.
fastai/vision/data.py
def verify_images(path:PathOrStr, delete:bool=True, max_workers:int=4, max_size:Union[int]=None, recurse:bool=False, dest:PathOrStr='.', n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None, resume:bool=None, **kwargs): "Check if the images in `path` aren't broken, maybe resize them and copy it in `dest`." path = Path(path) if resume is None and dest == '.': resume=False dest = path/Path(dest) os.makedirs(dest, exist_ok=True) files = get_image_files(path, recurse=recurse) func = partial(verify_image, delete=delete, max_size=max_size, dest=dest, n_channels=n_channels, interp=interp, ext=ext, img_format=img_format, resume=resume, **kwargs) parallel(func, files, max_workers=max_workers)
def verify_images(path:PathOrStr, delete:bool=True, max_workers:int=4, max_size:Union[int]=None, recurse:bool=False, dest:PathOrStr='.', n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None, resume:bool=None, **kwargs): "Check if the images in `path` aren't broken, maybe resize them and copy it in `dest`." path = Path(path) if resume is None and dest == '.': resume=False dest = path/Path(dest) os.makedirs(dest, exist_ok=True) files = get_image_files(path, recurse=recurse) func = partial(verify_image, delete=delete, max_size=max_size, dest=dest, n_channels=n_channels, interp=interp, ext=ext, img_format=img_format, resume=resume, **kwargs) parallel(func, files, max_workers=max_workers)
[ "Check", "if", "the", "images", "in", "path", "aren", "t", "broken", "maybe", "resize", "them", "and", "copy", "it", "in", "dest", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L242-L253
[ "def", "verify_images", "(", "path", ":", "PathOrStr", ",", "delete", ":", "bool", "=", "True", ",", "max_workers", ":", "int", "=", "4", ",", "max_size", ":", "Union", "[", "int", "]", "=", "None", ",", "recurse", ":", "bool", "=", "False", ",", "dest", ":", "PathOrStr", "=", "'.'", ",", "n_channels", ":", "int", "=", "3", ",", "interp", "=", "PIL", ".", "Image", ".", "BILINEAR", ",", "ext", ":", "str", "=", "None", ",", "img_format", ":", "str", "=", "None", ",", "resume", ":", "bool", "=", "None", ",", "*", "*", "kwargs", ")", ":", "path", "=", "Path", "(", "path", ")", "if", "resume", "is", "None", "and", "dest", "==", "'.'", ":", "resume", "=", "False", "dest", "=", "path", "/", "Path", "(", "dest", ")", "os", ".", "makedirs", "(", "dest", ",", "exist_ok", "=", "True", ")", "files", "=", "get_image_files", "(", "path", ",", "recurse", "=", "recurse", ")", "func", "=", "partial", "(", "verify_image", ",", "delete", "=", "delete", ",", "max_size", "=", "max_size", ",", "dest", "=", "dest", ",", "n_channels", "=", "n_channels", ",", "interp", "=", "interp", ",", "ext", "=", "ext", ",", "img_format", "=", "img_format", ",", "resume", "=", "resume", ",", "*", "*", "kwargs", ")", "parallel", "(", "func", ",", "files", ",", "max_workers", "=", "max_workers", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
_ll_pre_transform
Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`
fastai/vision/data.py
def _ll_pre_transform(self, train_tfm:List[Callable], valid_tfm:List[Callable]): "Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`" self.train.x.after_open = compose(train_tfm) self.valid.x.after_open = compose(valid_tfm) return self
def _ll_pre_transform(self, train_tfm:List[Callable], valid_tfm:List[Callable]): "Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`" self.train.x.after_open = compose(train_tfm) self.valid.x.after_open = compose(valid_tfm) return self
[ "Call", "train_tfm", "and", "valid_tfm", "after", "opening", "image", "before", "converting", "from", "PIL", ".", "Image" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L434-L438
[ "def", "_ll_pre_transform", "(", "self", ",", "train_tfm", ":", "List", "[", "Callable", "]", ",", "valid_tfm", ":", "List", "[", "Callable", "]", ")", ":", "self", ".", "train", ".", "x", ".", "after_open", "=", "compose", "(", "train_tfm", ")", "self", ".", "valid", ".", "x", ".", "after_open", "=", "compose", "(", "valid_tfm", ")", "return", "self" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
_db_pre_transform
Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`
fastai/vision/data.py
def _db_pre_transform(self, train_tfm:List[Callable], valid_tfm:List[Callable]): "Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`" self.train_ds.x.after_open = compose(train_tfm) self.valid_ds.x.after_open = compose(valid_tfm) return self
def _db_pre_transform(self, train_tfm:List[Callable], valid_tfm:List[Callable]): "Call `train_tfm` and `valid_tfm` after opening image, before converting from `PIL.Image`" self.train_ds.x.after_open = compose(train_tfm) self.valid_ds.x.after_open = compose(valid_tfm) return self
[ "Call", "train_tfm", "and", "valid_tfm", "after", "opening", "image", "before", "converting", "from", "PIL", ".", "Image" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L440-L444
[ "def", "_db_pre_transform", "(", "self", ",", "train_tfm", ":", "List", "[", "Callable", "]", ",", "valid_tfm", ":", "List", "[", "Callable", "]", ")", ":", "self", ".", "train_ds", ".", "x", ".", "after_open", "=", "compose", "(", "train_tfm", ")", "self", ".", "valid_ds", ".", "x", ".", "after_open", "=", "compose", "(", "valid_tfm", ")", "return", "self" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
_presize
Resize images to `size` using `RandomResizedCrop`, passing along `kwargs` to train transform
fastai/vision/data.py
def _presize(self, size:int, val_xtra_size:int=32, scale:Tuple[float]=(0.08, 1.0), ratio:Tuple[float]=(0.75, 4./3.), interpolation:int=2): "Resize images to `size` using `RandomResizedCrop`, passing along `kwargs` to train transform" return self.pre_transform( tvt.RandomResizedCrop(size, scale=scale, ratio=ratio, interpolation=interpolation), [tvt.Resize(size+val_xtra_size), tvt.CenterCrop(size)])
def _presize(self, size:int, val_xtra_size:int=32, scale:Tuple[float]=(0.08, 1.0), ratio:Tuple[float]=(0.75, 4./3.), interpolation:int=2): "Resize images to `size` using `RandomResizedCrop`, passing along `kwargs` to train transform" return self.pre_transform( tvt.RandomResizedCrop(size, scale=scale, ratio=ratio, interpolation=interpolation), [tvt.Resize(size+val_xtra_size), tvt.CenterCrop(size)])
[ "Resize", "images", "to", "size", "using", "RandomResizedCrop", "passing", "along", "kwargs", "to", "train", "transform" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L446-L451
[ "def", "_presize", "(", "self", ",", "size", ":", "int", ",", "val_xtra_size", ":", "int", "=", "32", ",", "scale", ":", "Tuple", "[", "float", "]", "=", "(", "0.08", ",", "1.0", ")", ",", "ratio", ":", "Tuple", "[", "float", "]", "=", "(", "0.75", ",", "4.", "/", "3.", ")", ",", "interpolation", ":", "int", "=", "2", ")", ":", "return", "self", ".", "pre_transform", "(", "tvt", ".", "RandomResizedCrop", "(", "size", ",", "scale", "=", "scale", ",", "ratio", "=", "ratio", ",", "interpolation", "=", "interpolation", ")", ",", "[", "tvt", ".", "Resize", "(", "size", "+", "val_xtra_size", ")", ",", "tvt", ".", "CenterCrop", "(", "size", ")", "]", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.create_from_ll
Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`.
fastai/vision/data.py
def create_from_ll(cls, lls:LabelLists, bs:int=64, val_bs:int=None, ds_tfms:Optional[TfmList]=None, num_workers:int=defaults.cpus, dl_tfms:Optional[Collection[Callable]]=None, device:torch.device=None, test:Optional[PathOrStr]=None, collate_fn:Callable=data_collate, size:int=None, no_check:bool=False, resize_method:ResizeMethod=None, mult:int=None, padding_mode:str='reflection', mode:str='bilinear', tfm_y:bool=False)->'ImageDataBunch': "Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`." lls = lls.transform(tfms=ds_tfms, size=size, resize_method=resize_method, mult=mult, padding_mode=padding_mode, mode=mode, tfm_y=tfm_y) if test is not None: lls.add_test_folder(test) return lls.databunch(bs=bs, val_bs=val_bs, dl_tfms=dl_tfms, num_workers=num_workers, collate_fn=collate_fn, device=device, no_check=no_check)
def create_from_ll(cls, lls:LabelLists, bs:int=64, val_bs:int=None, ds_tfms:Optional[TfmList]=None, num_workers:int=defaults.cpus, dl_tfms:Optional[Collection[Callable]]=None, device:torch.device=None, test:Optional[PathOrStr]=None, collate_fn:Callable=data_collate, size:int=None, no_check:bool=False, resize_method:ResizeMethod=None, mult:int=None, padding_mode:str='reflection', mode:str='bilinear', tfm_y:bool=False)->'ImageDataBunch': "Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`." lls = lls.transform(tfms=ds_tfms, size=size, resize_method=resize_method, mult=mult, padding_mode=padding_mode, mode=mode, tfm_y=tfm_y) if test is not None: lls.add_test_folder(test) return lls.databunch(bs=bs, val_bs=val_bs, dl_tfms=dl_tfms, num_workers=num_workers, collate_fn=collate_fn, device=device, no_check=no_check)
[ "Create", "an", "ImageDataBunch", "from", "LabelLists", "lls", "with", "potential", "ds_tfms", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L90-L100
[ "def", "create_from_ll", "(", "cls", ",", "lls", ":", "LabelLists", ",", "bs", ":", "int", "=", "64", ",", "val_bs", ":", "int", "=", "None", ",", "ds_tfms", ":", "Optional", "[", "TfmList", "]", "=", "None", ",", "num_workers", ":", "int", "=", "defaults", ".", "cpus", ",", "dl_tfms", ":", "Optional", "[", "Collection", "[", "Callable", "]", "]", "=", "None", ",", "device", ":", "torch", ".", "device", "=", "None", ",", "test", ":", "Optional", "[", "PathOrStr", "]", "=", "None", ",", "collate_fn", ":", "Callable", "=", "data_collate", ",", "size", ":", "int", "=", "None", ",", "no_check", ":", "bool", "=", "False", ",", "resize_method", ":", "ResizeMethod", "=", "None", ",", "mult", ":", "int", "=", "None", ",", "padding_mode", ":", "str", "=", "'reflection'", ",", "mode", ":", "str", "=", "'bilinear'", ",", "tfm_y", ":", "bool", "=", "False", ")", "->", "'ImageDataBunch'", ":", "lls", "=", "lls", ".", "transform", "(", "tfms", "=", "ds_tfms", ",", "size", "=", "size", ",", "resize_method", "=", "resize_method", ",", "mult", "=", "mult", ",", "padding_mode", "=", "padding_mode", ",", "mode", "=", "mode", ",", "tfm_y", "=", "tfm_y", ")", "if", "test", "is", "not", "None", ":", "lls", ".", "add_test_folder", "(", "test", ")", "return", "lls", ".", "databunch", "(", "bs", "=", "bs", ",", "val_bs", "=", "val_bs", ",", "dl_tfms", "=", "dl_tfms", ",", "num_workers", "=", "num_workers", ",", "collate_fn", "=", "collate_fn", ",", "device", "=", "device", ",", "no_check", "=", "no_check", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.from_folder
Create from imagenet style dataset in `path` with `train`,`valid`,`test` subfolders (or provide `valid_pct`).
fastai/vision/data.py
def from_folder(cls, path:PathOrStr, train:PathOrStr='train', valid:PathOrStr='valid', valid_pct=None, classes:Collection=None, **kwargs:Any)->'ImageDataBunch': "Create from imagenet style dataset in `path` with `train`,`valid`,`test` subfolders (or provide `valid_pct`)." path=Path(path) il = ImageList.from_folder(path) if valid_pct is None: src = il.split_by_folder(train=train, valid=valid) else: src = il.split_by_rand_pct(valid_pct) src = src.label_from_folder(classes=classes) return cls.create_from_ll(src, **kwargs)
def from_folder(cls, path:PathOrStr, train:PathOrStr='train', valid:PathOrStr='valid', valid_pct=None, classes:Collection=None, **kwargs:Any)->'ImageDataBunch': "Create from imagenet style dataset in `path` with `train`,`valid`,`test` subfolders (or provide `valid_pct`)." path=Path(path) il = ImageList.from_folder(path) if valid_pct is None: src = il.split_by_folder(train=train, valid=valid) else: src = il.split_by_rand_pct(valid_pct) src = src.label_from_folder(classes=classes) return cls.create_from_ll(src, **kwargs)
[ "Create", "from", "imagenet", "style", "dataset", "in", "path", "with", "train", "valid", "test", "subfolders", "(", "or", "provide", "valid_pct", ")", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L103-L111
[ "def", "from_folder", "(", "cls", ",", "path", ":", "PathOrStr", ",", "train", ":", "PathOrStr", "=", "'train'", ",", "valid", ":", "PathOrStr", "=", "'valid'", ",", "valid_pct", "=", "None", ",", "classes", ":", "Collection", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'ImageDataBunch'", ":", "path", "=", "Path", "(", "path", ")", "il", "=", "ImageList", ".", "from_folder", "(", "path", ")", "if", "valid_pct", "is", "None", ":", "src", "=", "il", ".", "split_by_folder", "(", "train", "=", "train", ",", "valid", "=", "valid", ")", "else", ":", "src", "=", "il", ".", "split_by_rand_pct", "(", "valid_pct", ")", "src", "=", "src", ".", "label_from_folder", "(", "classes", "=", "classes", ")", "return", "cls", ".", "create_from_ll", "(", "src", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.from_df
Create from a `DataFrame` `df`.
fastai/vision/data.py
def from_df(cls, path:PathOrStr, df:pd.DataFrame, folder:PathOrStr=None, label_delim:str=None, valid_pct:float=0.2, fn_col:IntsOrStrs=0, label_col:IntsOrStrs=1, suffix:str='', **kwargs:Any)->'ImageDataBunch': "Create from a `DataFrame` `df`." src = (ImageList.from_df(df, path=path, folder=folder, suffix=suffix, cols=fn_col) .split_by_rand_pct(valid_pct) .label_from_df(label_delim=label_delim, cols=label_col)) return cls.create_from_ll(src, **kwargs)
def from_df(cls, path:PathOrStr, df:pd.DataFrame, folder:PathOrStr=None, label_delim:str=None, valid_pct:float=0.2, fn_col:IntsOrStrs=0, label_col:IntsOrStrs=1, suffix:str='', **kwargs:Any)->'ImageDataBunch': "Create from a `DataFrame` `df`." src = (ImageList.from_df(df, path=path, folder=folder, suffix=suffix, cols=fn_col) .split_by_rand_pct(valid_pct) .label_from_df(label_delim=label_delim, cols=label_col)) return cls.create_from_ll(src, **kwargs)
[ "Create", "from", "a", "DataFrame", "df", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L114-L120
[ "def", "from_df", "(", "cls", ",", "path", ":", "PathOrStr", ",", "df", ":", "pd", ".", "DataFrame", ",", "folder", ":", "PathOrStr", "=", "None", ",", "label_delim", ":", "str", "=", "None", ",", "valid_pct", ":", "float", "=", "0.2", ",", "fn_col", ":", "IntsOrStrs", "=", "0", ",", "label_col", ":", "IntsOrStrs", "=", "1", ",", "suffix", ":", "str", "=", "''", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'ImageDataBunch'", ":", "src", "=", "(", "ImageList", ".", "from_df", "(", "df", ",", "path", "=", "path", ",", "folder", "=", "folder", ",", "suffix", "=", "suffix", ",", "cols", "=", "fn_col", ")", ".", "split_by_rand_pct", "(", "valid_pct", ")", ".", "label_from_df", "(", "label_delim", "=", "label_delim", ",", "cols", "=", "label_col", ")", ")", "return", "cls", ".", "create_from_ll", "(", "src", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.from_csv
Create from a csv file in `path/csv_labels`.
fastai/vision/data.py
def from_csv(cls, path:PathOrStr, folder:PathOrStr=None, label_delim:str=None, csv_labels:PathOrStr='labels.csv', valid_pct:float=0.2, fn_col:int=0, label_col:int=1, suffix:str='', delimiter:str=None, header:Optional[Union[int,str]]='infer', **kwargs:Any)->'ImageDataBunch': "Create from a csv file in `path/csv_labels`." path = Path(path) df = pd.read_csv(path/csv_labels, header=header, delimiter=delimiter) return cls.from_df(path, df, folder=folder, label_delim=label_delim, valid_pct=valid_pct, fn_col=fn_col, label_col=label_col, suffix=suffix, **kwargs)
def from_csv(cls, path:PathOrStr, folder:PathOrStr=None, label_delim:str=None, csv_labels:PathOrStr='labels.csv', valid_pct:float=0.2, fn_col:int=0, label_col:int=1, suffix:str='', delimiter:str=None, header:Optional[Union[int,str]]='infer', **kwargs:Any)->'ImageDataBunch': "Create from a csv file in `path/csv_labels`." path = Path(path) df = pd.read_csv(path/csv_labels, header=header, delimiter=delimiter) return cls.from_df(path, df, folder=folder, label_delim=label_delim, valid_pct=valid_pct, fn_col=fn_col, label_col=label_col, suffix=suffix, **kwargs)
[ "Create", "from", "a", "csv", "file", "in", "path", "/", "csv_labels", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L123-L130
[ "def", "from_csv", "(", "cls", ",", "path", ":", "PathOrStr", ",", "folder", ":", "PathOrStr", "=", "None", ",", "label_delim", ":", "str", "=", "None", ",", "csv_labels", ":", "PathOrStr", "=", "'labels.csv'", ",", "valid_pct", ":", "float", "=", "0.2", ",", "fn_col", ":", "int", "=", "0", ",", "label_col", ":", "int", "=", "1", ",", "suffix", ":", "str", "=", "''", ",", "delimiter", ":", "str", "=", "None", ",", "header", ":", "Optional", "[", "Union", "[", "int", ",", "str", "]", "]", "=", "'infer'", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'ImageDataBunch'", ":", "path", "=", "Path", "(", "path", ")", "df", "=", "pd", ".", "read_csv", "(", "path", "/", "csv_labels", ",", "header", "=", "header", ",", "delimiter", "=", "delimiter", ")", "return", "cls", ".", "from_df", "(", "path", ",", "df", ",", "folder", "=", "folder", ",", "label_delim", "=", "label_delim", ",", "valid_pct", "=", "valid_pct", ",", "fn_col", "=", "fn_col", ",", "label_col", "=", "label_col", ",", "suffix", "=", "suffix", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.from_lists
Create from list of `fnames` in `path`.
fastai/vision/data.py
def from_lists(cls, path:PathOrStr, fnames:FilePathList, labels:Collection[str], valid_pct:float=0.2, item_cls:Callable=None, **kwargs): "Create from list of `fnames` in `path`." item_cls = ifnone(item_cls, ImageList) fname2label = {f:l for (f,l) in zip(fnames, labels)} src = (item_cls(fnames, path=path).split_by_rand_pct(valid_pct) .label_from_func(lambda x:fname2label[x])) return cls.create_from_ll(src, **kwargs)
def from_lists(cls, path:PathOrStr, fnames:FilePathList, labels:Collection[str], valid_pct:float=0.2, item_cls:Callable=None, **kwargs): "Create from list of `fnames` in `path`." item_cls = ifnone(item_cls, ImageList) fname2label = {f:l for (f,l) in zip(fnames, labels)} src = (item_cls(fnames, path=path).split_by_rand_pct(valid_pct) .label_from_func(lambda x:fname2label[x])) return cls.create_from_ll(src, **kwargs)
[ "Create", "from", "list", "of", "fnames", "in", "path", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L133-L140
[ "def", "from_lists", "(", "cls", ",", "path", ":", "PathOrStr", ",", "fnames", ":", "FilePathList", ",", "labels", ":", "Collection", "[", "str", "]", ",", "valid_pct", ":", "float", "=", "0.2", ",", "item_cls", ":", "Callable", "=", "None", ",", "*", "*", "kwargs", ")", ":", "item_cls", "=", "ifnone", "(", "item_cls", ",", "ImageList", ")", "fname2label", "=", "{", "f", ":", "l", "for", "(", "f", ",", "l", ")", "in", "zip", "(", "fnames", ",", "labels", ")", "}", "src", "=", "(", "item_cls", "(", "fnames", ",", "path", "=", "path", ")", ".", "split_by_rand_pct", "(", "valid_pct", ")", ".", "label_from_func", "(", "lambda", "x", ":", "fname2label", "[", "x", "]", ")", ")", "return", "cls", ".", "create_from_ll", "(", "src", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.from_name_func
Create from list of `fnames` in `path` with `label_func`.
fastai/vision/data.py
def from_name_func(cls, path:PathOrStr, fnames:FilePathList, label_func:Callable, valid_pct:float=0.2, **kwargs): "Create from list of `fnames` in `path` with `label_func`." src = ImageList(fnames, path=path).split_by_rand_pct(valid_pct) return cls.create_from_ll(src.label_from_func(label_func), **kwargs)
def from_name_func(cls, path:PathOrStr, fnames:FilePathList, label_func:Callable, valid_pct:float=0.2, **kwargs): "Create from list of `fnames` in `path` with `label_func`." src = ImageList(fnames, path=path).split_by_rand_pct(valid_pct) return cls.create_from_ll(src.label_from_func(label_func), **kwargs)
[ "Create", "from", "list", "of", "fnames", "in", "path", "with", "label_func", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L143-L146
[ "def", "from_name_func", "(", "cls", ",", "path", ":", "PathOrStr", ",", "fnames", ":", "FilePathList", ",", "label_func", ":", "Callable", ",", "valid_pct", ":", "float", "=", "0.2", ",", "*", "*", "kwargs", ")", ":", "src", "=", "ImageList", "(", "fnames", ",", "path", "=", "path", ")", ".", "split_by_rand_pct", "(", "valid_pct", ")", "return", "cls", ".", "create_from_ll", "(", "src", ".", "label_from_func", "(", "label_func", ")", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.from_name_re
Create from list of `fnames` in `path` with re expression `pat`.
fastai/vision/data.py
def from_name_re(cls, path:PathOrStr, fnames:FilePathList, pat:str, valid_pct:float=0.2, **kwargs): "Create from list of `fnames` in `path` with re expression `pat`." pat = re.compile(pat) def _get_label(fn): if isinstance(fn, Path): fn = fn.as_posix() res = pat.search(str(fn)) assert res,f'Failed to find "{pat}" in "{fn}"' return res.group(1) return cls.from_name_func(path, fnames, _get_label, valid_pct=valid_pct, **kwargs)
def from_name_re(cls, path:PathOrStr, fnames:FilePathList, pat:str, valid_pct:float=0.2, **kwargs): "Create from list of `fnames` in `path` with re expression `pat`." pat = re.compile(pat) def _get_label(fn): if isinstance(fn, Path): fn = fn.as_posix() res = pat.search(str(fn)) assert res,f'Failed to find "{pat}" in "{fn}"' return res.group(1) return cls.from_name_func(path, fnames, _get_label, valid_pct=valid_pct, **kwargs)
[ "Create", "from", "list", "of", "fnames", "in", "path", "with", "re", "expression", "pat", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L149-L157
[ "def", "from_name_re", "(", "cls", ",", "path", ":", "PathOrStr", ",", "fnames", ":", "FilePathList", ",", "pat", ":", "str", ",", "valid_pct", ":", "float", "=", "0.2", ",", "*", "*", "kwargs", ")", ":", "pat", "=", "re", ".", "compile", "(", "pat", ")", "def", "_get_label", "(", "fn", ")", ":", "if", "isinstance", "(", "fn", ",", "Path", ")", ":", "fn", "=", "fn", ".", "as_posix", "(", ")", "res", "=", "pat", ".", "search", "(", "str", "(", "fn", ")", ")", "assert", "res", ",", "f'Failed to find \"{pat}\" in \"{fn}\"'", "return", "res", ".", "group", "(", "1", ")", "return", "cls", ".", "from_name_func", "(", "path", ",", "fnames", ",", "_get_label", ",", "valid_pct", "=", "valid_pct", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.single_from_classes
Create an empty `ImageDataBunch` in `path` with `classes`. Typically used for inference.
fastai/vision/data.py
def single_from_classes(path:Union[Path, str], classes:Collection[str], ds_tfms:TfmList=None, **kwargs): "Create an empty `ImageDataBunch` in `path` with `classes`. Typically used for inference." warn("""This method is deprecated and will be removed in a future version, use `load_learner` after `Learner.export()`""", DeprecationWarning) sd = ImageList([], path=path, ignore_empty=True).split_none() return sd.label_const(0, label_cls=CategoryList, classes=classes).transform(ds_tfms, **kwargs).databunch()
def single_from_classes(path:Union[Path, str], classes:Collection[str], ds_tfms:TfmList=None, **kwargs): "Create an empty `ImageDataBunch` in `path` with `classes`. Typically used for inference." warn("""This method is deprecated and will be removed in a future version, use `load_learner` after `Learner.export()`""", DeprecationWarning) sd = ImageList([], path=path, ignore_empty=True).split_none() return sd.label_const(0, label_cls=CategoryList, classes=classes).transform(ds_tfms, **kwargs).databunch()
[ "Create", "an", "empty", "ImageDataBunch", "in", "path", "with", "classes", ".", "Typically", "used", "for", "inference", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L160-L165
[ "def", "single_from_classes", "(", "path", ":", "Union", "[", "Path", ",", "str", "]", ",", "classes", ":", "Collection", "[", "str", "]", ",", "ds_tfms", ":", "TfmList", "=", "None", ",", "*", "*", "kwargs", ")", ":", "warn", "(", "\"\"\"This method is deprecated and will be removed in a future version, use `load_learner` after\n `Learner.export()`\"\"\"", ",", "DeprecationWarning", ")", "sd", "=", "ImageList", "(", "[", "]", ",", "path", "=", "path", ",", "ignore_empty", "=", "True", ")", ".", "split_none", "(", ")", "return", "sd", ".", "label_const", "(", "0", ",", "label_cls", "=", "CategoryList", ",", "classes", "=", "classes", ")", ".", "transform", "(", "ds_tfms", ",", "*", "*", "kwargs", ")", ".", "databunch", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.batch_stats
Grab a batch of data and call reduction function `func` per channel
fastai/vision/data.py
def batch_stats(self, funcs:Collection[Callable]=None, ds_type:DatasetType=DatasetType.Train)->Tensor: "Grab a batch of data and call reduction function `func` per channel" funcs = ifnone(funcs, [torch.mean,torch.std]) x = self.one_batch(ds_type=ds_type, denorm=False)[0].cpu() return [func(channel_view(x), 1) for func in funcs]
def batch_stats(self, funcs:Collection[Callable]=None, ds_type:DatasetType=DatasetType.Train)->Tensor: "Grab a batch of data and call reduction function `func` per channel" funcs = ifnone(funcs, [torch.mean,torch.std]) x = self.one_batch(ds_type=ds_type, denorm=False)[0].cpu() return [func(channel_view(x), 1) for func in funcs]
[ "Grab", "a", "batch", "of", "data", "and", "call", "reduction", "function", "func", "per", "channel" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L167-L171
[ "def", "batch_stats", "(", "self", ",", "funcs", ":", "Collection", "[", "Callable", "]", "=", "None", ",", "ds_type", ":", "DatasetType", "=", "DatasetType", ".", "Train", ")", "->", "Tensor", ":", "funcs", "=", "ifnone", "(", "funcs", ",", "[", "torch", ".", "mean", ",", "torch", ".", "std", "]", ")", "x", "=", "self", ".", "one_batch", "(", "ds_type", "=", "ds_type", ",", "denorm", "=", "False", ")", "[", "0", "]", ".", "cpu", "(", ")", "return", "[", "func", "(", "channel_view", "(", "x", ")", ",", "1", ")", "for", "func", "in", "funcs", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageDataBunch.normalize
Add normalize transform using `stats` (defaults to `DataBunch.batch_stats`)
fastai/vision/data.py
def normalize(self, stats:Collection[Tensor]=None, do_x:bool=True, do_y:bool=False)->None: "Add normalize transform using `stats` (defaults to `DataBunch.batch_stats`)" if getattr(self,'norm',False): raise Exception('Can not call normalize twice') if stats is None: self.stats = self.batch_stats() else: self.stats = stats self.norm,self.denorm = normalize_funcs(*self.stats, do_x=do_x, do_y=do_y) self.add_tfm(self.norm) return self
def normalize(self, stats:Collection[Tensor]=None, do_x:bool=True, do_y:bool=False)->None: "Add normalize transform using `stats` (defaults to `DataBunch.batch_stats`)" if getattr(self,'norm',False): raise Exception('Can not call normalize twice') if stats is None: self.stats = self.batch_stats() else: self.stats = stats self.norm,self.denorm = normalize_funcs(*self.stats, do_x=do_x, do_y=do_y) self.add_tfm(self.norm) return self
[ "Add", "normalize", "transform", "using", "stats", "(", "defaults", "to", "DataBunch", ".", "batch_stats", ")" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L173-L180
[ "def", "normalize", "(", "self", ",", "stats", ":", "Collection", "[", "Tensor", "]", "=", "None", ",", "do_x", ":", "bool", "=", "True", ",", "do_y", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "getattr", "(", "self", ",", "'norm'", ",", "False", ")", ":", "raise", "Exception", "(", "'Can not call normalize twice'", ")", "if", "stats", "is", "None", ":", "self", ".", "stats", "=", "self", ".", "batch_stats", "(", ")", "else", ":", "self", ".", "stats", "=", "stats", "self", ".", "norm", ",", "self", ".", "denorm", "=", "normalize_funcs", "(", "*", "self", ".", "stats", ",", "do_x", "=", "do_x", ",", "do_y", "=", "do_y", ")", "self", ".", "add_tfm", "(", "self", ".", "norm", ")", "return", "self" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageList.open
Open image in `fn`, subclass and overwrite for custom behavior.
fastai/vision/data.py
def open(self, fn): "Open image in `fn`, subclass and overwrite for custom behavior." return open_image(fn, convert_mode=self.convert_mode, after_open=self.after_open)
def open(self, fn): "Open image in `fn`, subclass and overwrite for custom behavior." return open_image(fn, convert_mode=self.convert_mode, after_open=self.after_open)
[ "Open", "image", "in", "fn", "subclass", "and", "overwrite", "for", "custom", "behavior", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L264-L266
[ "def", "open", "(", "self", ",", "fn", ")", ":", "return", "open_image", "(", "fn", ",", "convert_mode", "=", "self", ".", "convert_mode", ",", "after_open", "=", "self", ".", "after_open", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageList.from_folder
Get the list of files in `path` that have an image suffix. `recurse` determines if we search subfolders.
fastai/vision/data.py
def from_folder(cls, path:PathOrStr='.', extensions:Collection[str]=None, **kwargs)->ItemList: "Get the list of files in `path` that have an image suffix. `recurse` determines if we search subfolders." extensions = ifnone(extensions, image_extensions) return super().from_folder(path=path, extensions=extensions, **kwargs)
def from_folder(cls, path:PathOrStr='.', extensions:Collection[str]=None, **kwargs)->ItemList: "Get the list of files in `path` that have an image suffix. `recurse` determines if we search subfolders." extensions = ifnone(extensions, image_extensions) return super().from_folder(path=path, extensions=extensions, **kwargs)
[ "Get", "the", "list", "of", "files", "in", "path", "that", "have", "an", "image", "suffix", ".", "recurse", "determines", "if", "we", "search", "subfolders", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L275-L278
[ "def", "from_folder", "(", "cls", ",", "path", ":", "PathOrStr", "=", "'.'", ",", "extensions", ":", "Collection", "[", "str", "]", "=", "None", ",", "*", "*", "kwargs", ")", "->", "ItemList", ":", "extensions", "=", "ifnone", "(", "extensions", ",", "image_extensions", ")", "return", "super", "(", ")", ".", "from_folder", "(", "path", "=", "path", ",", "extensions", "=", "extensions", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageList.from_df
Get the filenames in `cols` of `df` with `folder` in front of them, `suffix` at the end.
fastai/vision/data.py
def from_df(cls, df:DataFrame, path:PathOrStr, cols:IntsOrStrs=0, folder:PathOrStr=None, suffix:str='', **kwargs)->'ItemList': "Get the filenames in `cols` of `df` with `folder` in front of them, `suffix` at the end." suffix = suffix or '' res = super().from_df(df, path=path, cols=cols, **kwargs) pref = f'{res.path}{os.path.sep}' if folder is not None: pref += f'{folder}{os.path.sep}' res.items = np.char.add(np.char.add(pref, res.items.astype(str)), suffix) return res
def from_df(cls, df:DataFrame, path:PathOrStr, cols:IntsOrStrs=0, folder:PathOrStr=None, suffix:str='', **kwargs)->'ItemList': "Get the filenames in `cols` of `df` with `folder` in front of them, `suffix` at the end." suffix = suffix or '' res = super().from_df(df, path=path, cols=cols, **kwargs) pref = f'{res.path}{os.path.sep}' if folder is not None: pref += f'{folder}{os.path.sep}' res.items = np.char.add(np.char.add(pref, res.items.astype(str)), suffix) return res
[ "Get", "the", "filenames", "in", "cols", "of", "df", "with", "folder", "in", "front", "of", "them", "suffix", "at", "the", "end", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L281-L288
[ "def", "from_df", "(", "cls", ",", "df", ":", "DataFrame", ",", "path", ":", "PathOrStr", ",", "cols", ":", "IntsOrStrs", "=", "0", ",", "folder", ":", "PathOrStr", "=", "None", ",", "suffix", ":", "str", "=", "''", ",", "*", "*", "kwargs", ")", "->", "'ItemList'", ":", "suffix", "=", "suffix", "or", "''", "res", "=", "super", "(", ")", ".", "from_df", "(", "df", ",", "path", "=", "path", ",", "cols", "=", "cols", ",", "*", "*", "kwargs", ")", "pref", "=", "f'{res.path}{os.path.sep}'", "if", "folder", "is", "not", "None", ":", "pref", "+=", "f'{folder}{os.path.sep}'", "res", ".", "items", "=", "np", ".", "char", ".", "add", "(", "np", ".", "char", ".", "add", "(", "pref", ",", "res", ".", "items", ".", "astype", "(", "str", ")", ")", ",", "suffix", ")", "return", "res" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageList.from_csv
Get the filenames in `path/csv_name` opened with `header`.
fastai/vision/data.py
def from_csv(cls, path:PathOrStr, csv_name:str, header:str='infer', **kwargs)->'ItemList': "Get the filenames in `path/csv_name` opened with `header`." path = Path(path) df = pd.read_csv(path/csv_name, header=header) return cls.from_df(df, path=path, **kwargs)
def from_csv(cls, path:PathOrStr, csv_name:str, header:str='infer', **kwargs)->'ItemList': "Get the filenames in `path/csv_name` opened with `header`." path = Path(path) df = pd.read_csv(path/csv_name, header=header) return cls.from_df(df, path=path, **kwargs)
[ "Get", "the", "filenames", "in", "path", "/", "csv_name", "opened", "with", "header", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L291-L295
[ "def", "from_csv", "(", "cls", ",", "path", ":", "PathOrStr", ",", "csv_name", ":", "str", ",", "header", ":", "str", "=", "'infer'", ",", "*", "*", "kwargs", ")", "->", "'ItemList'", ":", "path", "=", "Path", "(", "path", ")", "df", "=", "pd", ".", "read_csv", "(", "path", "/", "csv_name", ",", "header", "=", "header", ")", "return", "cls", ".", "from_df", "(", "df", ",", "path", "=", "path", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageList.show_xys
Show the `xs` (inputs) and `ys` (targets) on a figure of `figsize`.
fastai/vision/data.py
def show_xys(self, xs, ys, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show the `xs` (inputs) and `ys` (targets) on a figure of `figsize`." rows = int(np.ceil(math.sqrt(len(xs)))) axs = subplots(rows, rows, imgsize=imgsize, figsize=figsize) for x,y,ax in zip(xs, ys, axs.flatten()): x.show(ax=ax, y=y, **kwargs) for ax in axs.flatten()[len(xs):]: ax.axis('off') plt.tight_layout()
def show_xys(self, xs, ys, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show the `xs` (inputs) and `ys` (targets) on a figure of `figsize`." rows = int(np.ceil(math.sqrt(len(xs)))) axs = subplots(rows, rows, imgsize=imgsize, figsize=figsize) for x,y,ax in zip(xs, ys, axs.flatten()): x.show(ax=ax, y=y, **kwargs) for ax in axs.flatten()[len(xs):]: ax.axis('off') plt.tight_layout()
[ "Show", "the", "xs", "(", "inputs", ")", "and", "ys", "(", "targets", ")", "on", "a", "figure", "of", "figsize", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L299-L305
[ "def", "show_xys", "(", "self", ",", "xs", ",", "ys", ",", "imgsize", ":", "int", "=", "4", ",", "figsize", ":", "Optional", "[", "Tuple", "[", "int", ",", "int", "]", "]", "=", "None", ",", "*", "*", "kwargs", ")", ":", "rows", "=", "int", "(", "np", ".", "ceil", "(", "math", ".", "sqrt", "(", "len", "(", "xs", ")", ")", ")", ")", "axs", "=", "subplots", "(", "rows", ",", "rows", ",", "imgsize", "=", "imgsize", ",", "figsize", "=", "figsize", ")", "for", "x", ",", "y", ",", "ax", "in", "zip", "(", "xs", ",", "ys", ",", "axs", ".", "flatten", "(", ")", ")", ":", "x", ".", "show", "(", "ax", "=", "ax", ",", "y", "=", "y", ",", "*", "*", "kwargs", ")", "for", "ax", "in", "axs", ".", "flatten", "(", ")", "[", "len", "(", "xs", ")", ":", "]", ":", "ax", ".", "axis", "(", "'off'", ")", "plt", ".", "tight_layout", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageList.show_xyzs
Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`.
fastai/vision/data.py
def show_xyzs(self, xs, ys, zs, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`." if self._square_show_res: title = 'Ground truth\nPredictions' rows = int(np.ceil(math.sqrt(len(xs)))) axs = subplots(rows, rows, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=12) for x,y,z,ax in zip(xs,ys,zs,axs.flatten()): x.show(ax=ax, title=f'{str(y)}\n{str(z)}', **kwargs) for ax in axs.flatten()[len(xs):]: ax.axis('off') else: title = 'Ground truth/Predictions' axs = subplots(len(xs), 2, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=14) for i,(x,y,z) in enumerate(zip(xs,ys,zs)): x.show(ax=axs[i,0], y=y, **kwargs) x.show(ax=axs[i,1], y=z, **kwargs)
def show_xyzs(self, xs, ys, zs, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`." if self._square_show_res: title = 'Ground truth\nPredictions' rows = int(np.ceil(math.sqrt(len(xs)))) axs = subplots(rows, rows, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=12) for x,y,z,ax in zip(xs,ys,zs,axs.flatten()): x.show(ax=ax, title=f'{str(y)}\n{str(z)}', **kwargs) for ax in axs.flatten()[len(xs):]: ax.axis('off') else: title = 'Ground truth/Predictions' axs = subplots(len(xs), 2, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=14) for i,(x,y,z) in enumerate(zip(xs,ys,zs)): x.show(ax=axs[i,0], y=y, **kwargs) x.show(ax=axs[i,1], y=z, **kwargs)
[ "Show", "xs", "(", "inputs", ")", "ys", "(", "targets", ")", "and", "zs", "(", "predictions", ")", "on", "a", "figure", "of", "figsize", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L307-L320
[ "def", "show_xyzs", "(", "self", ",", "xs", ",", "ys", ",", "zs", ",", "imgsize", ":", "int", "=", "4", ",", "figsize", ":", "Optional", "[", "Tuple", "[", "int", ",", "int", "]", "]", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_square_show_res", ":", "title", "=", "'Ground truth\\nPredictions'", "rows", "=", "int", "(", "np", ".", "ceil", "(", "math", ".", "sqrt", "(", "len", "(", "xs", ")", ")", ")", ")", "axs", "=", "subplots", "(", "rows", ",", "rows", ",", "imgsize", "=", "imgsize", ",", "figsize", "=", "figsize", ",", "title", "=", "title", ",", "weight", "=", "'bold'", ",", "size", "=", "12", ")", "for", "x", ",", "y", ",", "z", ",", "ax", "in", "zip", "(", "xs", ",", "ys", ",", "zs", ",", "axs", ".", "flatten", "(", ")", ")", ":", "x", ".", "show", "(", "ax", "=", "ax", ",", "title", "=", "f'{str(y)}\\n{str(z)}'", ",", "*", "*", "kwargs", ")", "for", "ax", "in", "axs", ".", "flatten", "(", ")", "[", "len", "(", "xs", ")", ":", "]", ":", "ax", ".", "axis", "(", "'off'", ")", "else", ":", "title", "=", "'Ground truth/Predictions'", "axs", "=", "subplots", "(", "len", "(", "xs", ")", ",", "2", ",", "imgsize", "=", "imgsize", ",", "figsize", "=", "figsize", ",", "title", "=", "title", ",", "weight", "=", "'bold'", ",", "size", "=", "14", ")", "for", "i", ",", "(", "x", ",", "y", ",", "z", ")", "in", "enumerate", "(", "zip", "(", "xs", ",", "ys", ",", "zs", ")", ")", ":", "x", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "0", "]", ",", "y", "=", "y", ",", "*", "*", "kwargs", ")", "x", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "1", "]", ",", "y", "=", "z", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ObjectCategoryProcessor.generate_classes
Generate classes from unique `items` and add `background`.
fastai/vision/data.py
def generate_classes(self, items): "Generate classes from unique `items` and add `background`." classes = super().generate_classes([o[1] for o in items]) classes = ['background'] + list(classes) return classes
def generate_classes(self, items): "Generate classes from unique `items` and add `background`." classes = super().generate_classes([o[1] for o in items]) classes = ['background'] + list(classes) return classes
[ "Generate", "classes", "from", "unique", "items", "and", "add", "background", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L335-L339
[ "def", "generate_classes", "(", "self", ",", "items", ")", ":", "classes", "=", "super", "(", ")", ".", "generate_classes", "(", "[", "o", "[", "1", "]", "for", "o", "in", "items", "]", ")", "classes", "=", "[", "'background'", "]", "+", "list", "(", "classes", ")", "return", "classes" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageImageList.show_xys
Show the `xs` (inputs) and `ys`(targets) on a figure of `figsize`.
fastai/vision/data.py
def show_xys(self, xs, ys, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show the `xs` (inputs) and `ys`(targets) on a figure of `figsize`." axs = subplots(len(xs), 2, imgsize=imgsize, figsize=figsize) for i, (x,y) in enumerate(zip(xs,ys)): x.show(ax=axs[i,0], **kwargs) y.show(ax=axs[i,1], **kwargs) plt.tight_layout()
def show_xys(self, xs, ys, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show the `xs` (inputs) and `ys`(targets) on a figure of `figsize`." axs = subplots(len(xs), 2, imgsize=imgsize, figsize=figsize) for i, (x,y) in enumerate(zip(xs,ys)): x.show(ax=axs[i,0], **kwargs) y.show(ax=axs[i,1], **kwargs) plt.tight_layout()
[ "Show", "the", "xs", "(", "inputs", ")", "and", "ys", "(", "targets", ")", "on", "a", "figure", "of", "figsize", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L416-L422
[ "def", "show_xys", "(", "self", ",", "xs", ",", "ys", ",", "imgsize", ":", "int", "=", "4", ",", "figsize", ":", "Optional", "[", "Tuple", "[", "int", ",", "int", "]", "]", "=", "None", ",", "*", "*", "kwargs", ")", ":", "axs", "=", "subplots", "(", "len", "(", "xs", ")", ",", "2", ",", "imgsize", "=", "imgsize", ",", "figsize", "=", "figsize", ")", "for", "i", ",", "(", "x", ",", "y", ")", "in", "enumerate", "(", "zip", "(", "xs", ",", "ys", ")", ")", ":", "x", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "0", "]", ",", "*", "*", "kwargs", ")", "y", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "1", "]", ",", "*", "*", "kwargs", ")", "plt", ".", "tight_layout", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
ImageImageList.show_xyzs
Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`.
fastai/vision/data.py
def show_xyzs(self, xs, ys, zs, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`." title = 'Input / Prediction / Target' axs = subplots(len(xs), 3, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=14) for i,(x,y,z) in enumerate(zip(xs,ys,zs)): x.show(ax=axs[i,0], **kwargs) y.show(ax=axs[i,2], **kwargs) z.show(ax=axs[i,1], **kwargs)
def show_xyzs(self, xs, ys, zs, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs): "Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`." title = 'Input / Prediction / Target' axs = subplots(len(xs), 3, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=14) for i,(x,y,z) in enumerate(zip(xs,ys,zs)): x.show(ax=axs[i,0], **kwargs) y.show(ax=axs[i,2], **kwargs) z.show(ax=axs[i,1], **kwargs)
[ "Show", "xs", "(", "inputs", ")", "ys", "(", "targets", ")", "and", "zs", "(", "predictions", ")", "on", "a", "figure", "of", "figsize", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L424-L431
[ "def", "show_xyzs", "(", "self", ",", "xs", ",", "ys", ",", "zs", ",", "imgsize", ":", "int", "=", "4", ",", "figsize", ":", "Optional", "[", "Tuple", "[", "int", ",", "int", "]", "]", "=", "None", ",", "*", "*", "kwargs", ")", ":", "title", "=", "'Input / Prediction / Target'", "axs", "=", "subplots", "(", "len", "(", "xs", ")", ",", "3", ",", "imgsize", "=", "imgsize", ",", "figsize", "=", "figsize", ",", "title", "=", "title", ",", "weight", "=", "'bold'", ",", "size", "=", "14", ")", "for", "i", ",", "(", "x", ",", "y", ",", "z", ")", "in", "enumerate", "(", "zip", "(", "xs", ",", "ys", ",", "zs", ")", ")", ":", "x", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "0", "]", ",", "*", "*", "kwargs", ")", "y", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "2", "]", ",", "*", "*", "kwargs", ")", "z", ".", "show", "(", "ax", "=", "axs", "[", "i", ",", "1", "]", ",", "*", "*", "kwargs", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
gpu_mem_get
get total, used and free memory (in MBs) for gpu `id`. if `id` is not passed, currently selected torch device is used
fastai/utils/mem.py
def gpu_mem_get(id=None): "get total, used and free memory (in MBs) for gpu `id`. if `id` is not passed, currently selected torch device is used" if not use_gpu: return GPUMemory(0, 0, 0) if id is None: id = torch.cuda.current_device() try: handle = pynvml.nvmlDeviceGetHandleByIndex(id) info = pynvml.nvmlDeviceGetMemoryInfo(handle) return GPUMemory(*(map(b2mb, [info.total, info.free, info.used]))) except: return GPUMemory(0, 0, 0)
def gpu_mem_get(id=None): "get total, used and free memory (in MBs) for gpu `id`. if `id` is not passed, currently selected torch device is used" if not use_gpu: return GPUMemory(0, 0, 0) if id is None: id = torch.cuda.current_device() try: handle = pynvml.nvmlDeviceGetHandleByIndex(id) info = pynvml.nvmlDeviceGetMemoryInfo(handle) return GPUMemory(*(map(b2mb, [info.total, info.free, info.used]))) except: return GPUMemory(0, 0, 0)
[ "get", "total", "used", "and", "free", "memory", "(", "in", "MBs", ")", "for", "gpu", "id", ".", "if", "id", "is", "not", "passed", "currently", "selected", "torch", "device", "is", "used" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/utils/mem.py#L25-L34
[ "def", "gpu_mem_get", "(", "id", "=", "None", ")", ":", "if", "not", "use_gpu", ":", "return", "GPUMemory", "(", "0", ",", "0", ",", "0", ")", "if", "id", "is", "None", ":", "id", "=", "torch", ".", "cuda", ".", "current_device", "(", ")", "try", ":", "handle", "=", "pynvml", ".", "nvmlDeviceGetHandleByIndex", "(", "id", ")", "info", "=", "pynvml", ".", "nvmlDeviceGetMemoryInfo", "(", "handle", ")", "return", "GPUMemory", "(", "*", "(", "map", "(", "b2mb", ",", "[", "info", ".", "total", ",", "info", ".", "free", ",", "info", ".", "used", "]", ")", ")", ")", "except", ":", "return", "GPUMemory", "(", "0", ",", "0", ",", "0", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
gpu_with_max_free_mem
get [gpu_id, its_free_ram] for the first gpu with highest available RAM
fastai/utils/mem.py
def gpu_with_max_free_mem():
    "get [gpu_id, its_free_ram] for the first gpu with highest available RAM"
    stats = gpu_mem_get_all()
    # No GPUs visible: signal with (None, 0).
    if not len(stats): return None, 0
    free = np.array([gpu.free for gpu in stats])
    # np.argmax returns the first index on ties, i.e. the first gpu with max free RAM.
    winner = np.argmax(free)
    return winner, free[winner]
[ "get", "[", "gpu_id", "its_free_ram", "]", "for", "the", "first", "gpu", "with", "highest", "available", "RAM" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/utils/mem.py#L64-L70
[ "def", "gpu_with_max_free_mem", "(", ")", ":", "mem_all", "=", "gpu_mem_get_all", "(", ")", "if", "not", "len", "(", "mem_all", ")", ":", "return", "None", ",", "0", "free_all", "=", "np", ".", "array", "(", "[", "x", ".", "free", "for", "x", "in", "mem_all", "]", ")", "id", "=", "np", ".", "argmax", "(", "free_all", ")", "return", "id", ",", "free_all", "[", "id", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
gpu_mem_trace
A decorator that runs `GPUMemTrace` w/ report on func
fastai/utils/mem.py
def gpu_mem_trace(func):
    "A decorator that runs `GPUMemTrace` w/ report on func"
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Each call gets a fresh tracer tagged with the decorated function's
        # qualified name; on_exit_report makes it print when the call returns.
        tracer = GPUMemTrace(ctx=func.__qualname__, on_exit_report=True)
        with tracer:
            return func(*args, **kwargs)
    return wrapper
[ "A", "decorator", "that", "runs", "GPUMemTrace", "w", "/", "report", "on", "func" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/utils/mem.py#L169-L175
[ "def", "gpu_mem_trace", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "GPUMemTrace", "(", "ctx", "=", "func", ".", "__qualname__", ",", "on_exit_report", "=", "True", ")", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
reduce_mem_usage
iterate through all the columns of a dataframe and modify the data type to reduce memory usage.
fastai/utils/mem.py
def reduce_mem_usage(df):
    """
    iterate through all the columns of a dataframe and modify the data type
    to reduce memory usage.

    Returns the same DataFrame (columns are re-cast in place) and prints the
    memory usage before/after plus the percentage saved.
    """
    start_mem = df.memory_usage().sum() / 1024**2
    print('Memory usage of dataframe is {:.2f} MB'.format(start_mem))

    for col in df.columns:
        col_type = df[col].dtype
        # Leave categoricals, datetimes and booleans untouched.
        if str(col_type) == 'category' or col_type == 'datetime64[ns]' or col_type == bool:
            continue
        if col_type == object:
            # Object (string) columns compress well as pandas categoricals.
            df[col] = df[col].astype('category')
        else:
            _downcast_numeric(df, col, col_type)

    end_mem = df.memory_usage().sum() / 1024**2
    print('Memory usage after optimization is: {:.2f} MB'.format(end_mem))
    print('Decreased by {:.1f}%'.format(100 * (start_mem - end_mem) / start_mem))
    return df

def _downcast_numeric(df, col, col_type):
    "Re-cast numeric column `col` of `df` in place to the narrowest dtype holding its min/max."
    c_min = df[col].min()
    c_max = df[col].max()
    if str(col_type)[:3] == 'int':
        # Try integer widths narrowest-first. Strict inequalities keep the
        # original behavior of leaving values exactly at a dtype limit alone.
        for int_type in (np.int8, np.int16, np.int32, np.int64):
            if c_min > np.iinfo(int_type).min and c_max < np.iinfo(int_type).max:
                df[col] = df[col].astype(int_type)
                break
    else:
        # float16 was deliberately removed upstream ("sometimes causes an
        # error"); only float32 downcasting is attempted.
        if c_min > np.finfo(np.float32).min and c_max < np.finfo(np.float32).max:
            df[col] = df[col].astype(np.float32)
        else:
            print('Error '+col+' Value would be a float64. Disregarding.')
[ "iterate", "through", "all", "the", "columns", "of", "a", "dataframe", "and", "modify", "the", "data", "type", "to", "reduce", "memory", "usage", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/utils/mem.py#L177-L218
[ "def", "reduce_mem_usage", "(", "df", ")", ":", "start_mem", "=", "df", ".", "memory_usage", "(", ")", ".", "sum", "(", ")", "/", "1024", "**", "2", "print", "(", "'Memory usage of dataframe is {:.2f} MB'", ".", "format", "(", "start_mem", ")", ")", "#Removed from debugging", "columns", "=", "df", ".", "columns", "#.drop('index')", "for", "col", "in", "columns", ":", "col_type", "=", "df", "[", "col", "]", ".", "dtype", "if", "str", "(", "col_type", ")", "!=", "'category'", "and", "col_type", "!=", "'datetime64[ns]'", "and", "col_type", "!=", "bool", ":", "if", "col_type", "!=", "object", ":", "c_min", "=", "df", "[", "col", "]", ".", "min", "(", ")", "c_max", "=", "df", "[", "col", "]", ".", "max", "(", ")", "if", "str", "(", "col_type", ")", "[", ":", "3", "]", "==", "'int'", ":", "if", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int8", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int8", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int8", ")", "elif", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int16", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int16", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int16", ")", "elif", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int32", ")", "elif", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int64", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int64", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int64", ")", "else", ":", "#if c_min > np.finfo(np.float16).min and c_max < np.finfo(np.float16).max:", "#df[col] = df[col].astype(np.float16)", 
"#Sometimes causes and error and had to remove", "if", "c_min", ">", "np", ".", "finfo", "(", "np", ".", "float32", ")", ".", "min", "and", "c_max", "<", "np", ".", "finfo", "(", "np", ".", "float32", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "float32", ")", "else", ":", "print", "(", "'Error '", "+", "col", "+", "' Value would be a float64. Disregarding.'", ")", "else", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "'category'", ")", "end_mem", "=", "df", ".", "memory_usage", "(", ")", ".", "sum", "(", ")", "/", "1024", "**", "2", "print", "(", "'Memory usage after optimization is: {:.2f} MB'", ".", "format", "(", "end_mem", ")", ")", "print", "(", "'Decreased by {:.1f}%'", ".", "format", "(", "100", "*", "(", "start_mem", "-", "end_mem", ")", "/", "start_mem", ")", ")", "return", "df" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
GPUMemTrace._get_ctx
Return ' (ctx: subctx)' or ' (ctx)' or ' (subctx)' or '' depending on this and constructor arguments
fastai/utils/mem.py
def _get_ctx(self, subctx=None):
    "Return ' (ctx: subctx)' or ' (ctx)' or ' (subctx)' or '' depending on this and constructor arguments"
    # Collect whichever context labels are set, preserving ctx-before-subctx order.
    parts = [p for p in (self.ctx, subctx) if p is not None]
    if not parts: return ''
    return f" ({': '.join(parts)})"
[ "Return", "(", "ctx", ":", "subctx", ")", "or", "(", "ctx", ")", "or", "(", "subctx", ")", "or", "depending", "on", "this", "and", "constructor", "arguments" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/utils/mem.py#L129-L134
[ "def", "_get_ctx", "(", "self", ",", "subctx", "=", "None", ")", ":", "l", "=", "[", "]", "if", "self", ".", "ctx", "is", "not", "None", ":", "l", ".", "append", "(", "self", ".", "ctx", ")", "if", "subctx", "is", "not", "None", ":", "l", ".", "append", "(", "subctx", ")", "return", "''", "if", "len", "(", "l", ")", "==", "0", "else", "f\" ({': '.join(l)})\"" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
_learner_distributed
Put `learn` on distributed training with `cuda_id`.
fastai/distributed.py
def _learner_distributed(learn:Learner, cuda_id:int, cache_dir:PathOrStr='tmp'):
    "Put `learn` on distributed training with `cuda_id`."
    # Trainer first, then recorder, so they fire in that order during training.
    learn.callbacks += [DistributedTrainer(learn, cuda_id)]
    learn.callbacks += [DistributedRecorder(learn, cuda_id, cache_dir)]
    return learn
[ "Put", "learn", "on", "distributed", "training", "with", "cuda_id", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/distributed.py#L70-L74
[ "def", "_learner_distributed", "(", "learn", ":", "Learner", ",", "cuda_id", ":", "int", ",", "cache_dir", ":", "PathOrStr", "=", "'tmp'", ")", ":", "learn", ".", "callbacks", ".", "append", "(", "DistributedTrainer", "(", "learn", ",", "cuda_id", ")", ")", "learn", ".", "callbacks", ".", "append", "(", "DistributedRecorder", "(", "learn", ",", "cuda_id", ",", "cache_dir", ")", ")", "return", "learn" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
xresnet18
Constructs a XResNet-18 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
fastai/vision/models/xresnet2.py
def xresnet18(pretrained=False, **kwargs):
    """Constructs a XResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    # XResNet-18: BasicBlock with two blocks per stage.
    net = XResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        net.load_state_dict(model_zoo.load_url(model_urls['xresnet18']))
    return net
[ "Constructs", "a", "XResNet", "-", "18", "model", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/models/xresnet2.py#L148-L156
[ "def", "xresnet18", "(", "pretrained", "=", "False", ",", "*", "*", "kwargs", ")", ":", "model", "=", "XResNet", "(", "BasicBlock", ",", "[", "2", ",", "2", ",", "2", ",", "2", "]", ",", "*", "*", "kwargs", ")", "if", "pretrained", ":", "model", ".", "load_state_dict", "(", "model_zoo", ".", "load_url", "(", "model_urls", "[", "'xresnet18'", "]", ")", ")", "return", "model" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
xresnet50_2
Constructs a XResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
fastai/vision/models/xresnet2.py
def xresnet50_2(pretrained=False, **kwargs):
    """Constructs a XResNet-50 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    # XResNet-50: Bottleneck blocks with [3, 4, 6, 3] per stage.
    net = XResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
    if pretrained:
        net.load_state_dict(model_zoo.load_url(model_urls['xresnet50']))
    return net
[ "Constructs", "a", "XResNet", "-", "50", "model", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/models/xresnet2.py#L170-L178
[ "def", "xresnet50_2", "(", "pretrained", "=", "False", ",", "*", "*", "kwargs", ")", ":", "model", "=", "XResNet", "(", "Bottleneck", ",", "[", "3", ",", "4", ",", "6", ",", "3", "]", ",", "*", "*", "kwargs", ")", "if", "pretrained", ":", "model", ".", "load_state_dict", "(", "model_zoo", ".", "load_url", "(", "model_urls", "[", "'xresnet50'", "]", ")", ")", "return", "model" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
loss_batch
Calculate loss and metrics for a batch, call out to callbacks as necessary.
fastai/basic_train.py
def loss_batch(model:nn.Module, xb:Tensor, yb:Tensor, loss_func:OptLossFunc=None, opt:OptOptimizer=None, cb_handler:Optional[CallbackHandler]=None)->Tuple[Union[Tensor,int,float,str]]:
    "Calculate loss and metrics for a batch, call out to callbacks as necessary."
    cb_handler = ifnone(cb_handler, CallbackHandler())
    # Normalize inputs/targets to lists so they can be splatted into calls.
    xb = xb if is_listy(xb) else [xb]
    yb = yb if is_listy(yb) else [yb]
    out = cb_handler.on_loss_begin(model(*xb))
    # With no loss function this is pure inference: return detached preds/targets.
    if not loss_func: return to_detach(out), yb[0].detach()
    loss = loss_func(out, *yb)
    if opt is not None:
        # Callbacks may rescale the loss and/or veto each of the three steps.
        loss, skip_bwd = cb_handler.on_backward_begin(loss)
        if not skip_bwd:                    loss.backward()
        if not cb_handler.on_backward_end(): opt.step()
        if not cb_handler.on_step_end():     opt.zero_grad()
    return loss.detach().cpu()
[ "Calculate", "loss", "and", "metrics", "for", "a", "batch", "call", "out", "to", "callbacks", "as", "necessary", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L20-L38
[ "def", "loss_batch", "(", "model", ":", "nn", ".", "Module", ",", "xb", ":", "Tensor", ",", "yb", ":", "Tensor", ",", "loss_func", ":", "OptLossFunc", "=", "None", ",", "opt", ":", "OptOptimizer", "=", "None", ",", "cb_handler", ":", "Optional", "[", "CallbackHandler", "]", "=", "None", ")", "->", "Tuple", "[", "Union", "[", "Tensor", ",", "int", ",", "float", ",", "str", "]", "]", ":", "cb_handler", "=", "ifnone", "(", "cb_handler", ",", "CallbackHandler", "(", ")", ")", "if", "not", "is_listy", "(", "xb", ")", ":", "xb", "=", "[", "xb", "]", "if", "not", "is_listy", "(", "yb", ")", ":", "yb", "=", "[", "yb", "]", "out", "=", "model", "(", "*", "xb", ")", "out", "=", "cb_handler", ".", "on_loss_begin", "(", "out", ")", "if", "not", "loss_func", ":", "return", "to_detach", "(", "out", ")", ",", "yb", "[", "0", "]", ".", "detach", "(", ")", "loss", "=", "loss_func", "(", "out", ",", "*", "yb", ")", "if", "opt", "is", "not", "None", ":", "loss", ",", "skip_bwd", "=", "cb_handler", ".", "on_backward_begin", "(", "loss", ")", "if", "not", "skip_bwd", ":", "loss", ".", "backward", "(", ")", "if", "not", "cb_handler", ".", "on_backward_end", "(", ")", ":", "opt", ".", "step", "(", ")", "if", "not", "cb_handler", ".", "on_step_end", "(", ")", ":", "opt", ".", "zero_grad", "(", ")", "return", "loss", ".", "detach", "(", ")", ".", "cpu", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
get_preds
Tuple of predictions and targets, and optional losses (if `loss_func`) using `dl`, max batches `n_batch`.
fastai/basic_train.py
def get_preds(model:nn.Module, dl:DataLoader, pbar:Optional[PBar]=None, cb_handler:Optional[CallbackHandler]=None, activ:nn.Module=None, loss_func:OptLossFunc=None, n_batch:Optional[int]=None) -> List[Tensor]:
    "Tuple of predictions and targets, and optional losses (if `loss_func`) using `dl`, max batches `n_batch`."
    # validate(average=False) yields per-batch (preds, targets) pairs;
    # transpose them and concatenate each stream on the CPU.
    batches = validate(model, dl, cb_handler=cb_handler, pbar=pbar, average=False, n_batch=n_batch)
    res = [torch.cat(group).cpu() for group in zip(*batches)]
    if loss_func is not None:
        # Compute per-item losses with reduction disabled.
        with NoneReduceOnCPU(loss_func) as lf: res.append(lf(res[0], res[1]))
    if activ is not None: res[0] = activ(res[0])
    return res
[ "Tuple", "of", "predictions", "and", "targets", "and", "optional", "losses", "(", "if", "loss_func", ")", "using", "dl", "max", "batches", "n_batch", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L40-L48
[ "def", "get_preds", "(", "model", ":", "nn", ".", "Module", ",", "dl", ":", "DataLoader", ",", "pbar", ":", "Optional", "[", "PBar", "]", "=", "None", ",", "cb_handler", ":", "Optional", "[", "CallbackHandler", "]", "=", "None", ",", "activ", ":", "nn", ".", "Module", "=", "None", ",", "loss_func", ":", "OptLossFunc", "=", "None", ",", "n_batch", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "List", "[", "Tensor", "]", ":", "res", "=", "[", "torch", ".", "cat", "(", "o", ")", ".", "cpu", "(", ")", "for", "o", "in", "zip", "(", "*", "validate", "(", "model", ",", "dl", ",", "cb_handler", "=", "cb_handler", ",", "pbar", "=", "pbar", ",", "average", "=", "False", ",", "n_batch", "=", "n_batch", ")", ")", "]", "if", "loss_func", "is", "not", "None", ":", "with", "NoneReduceOnCPU", "(", "loss_func", ")", "as", "lf", ":", "res", ".", "append", "(", "lf", "(", "res", "[", "0", "]", ",", "res", "[", "1", "]", ")", ")", "if", "activ", "is", "not", "None", ":", "res", "[", "0", "]", "=", "activ", "(", "res", "[", "0", "]", ")", "return", "res" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
validate
Calculate `loss_func` of `model` on `dl` in evaluation mode.
fastai/basic_train.py
def validate(model:nn.Module, dl:DataLoader, loss_func:OptLossFunc=None, cb_handler:Optional[CallbackHandler]=None, pbar:Optional[PBar]=None, average=True, n_batch:Optional[int]=None)->Iterator[Tuple[Union[Tensor,int],...]]:
    "Calculate `loss_func` of `model` on `dl` in evaluation mode."
    model.eval()
    with torch.no_grad():
        losses, counts = [], []
        if cb_handler: cb_handler.set_dl(dl)
        for xb, yb in progress_bar(dl, parent=pbar, leave=(pbar is not None)):
            if cb_handler: xb, yb = cb_handler.on_batch_begin(xb, yb, train=False)
            losses.append(loss_batch(model, xb, yb, loss_func, cb_handler=cb_handler))
            # Track batch sizes so the average below is weighted correctly.
            targets = yb if is_listy(yb) else [yb]
            counts.append(targets[0].shape[0])
            if cb_handler and cb_handler.on_batch_end(losses[-1]): break
            if n_batch and len(counts) >= n_batch: break
        if not average: return losses
        counts = np.array(counts, dtype=np.float32)
        return (to_np(torch.stack(losses)) * counts).sum() / counts.sum()
[ "Calculate", "loss_func", "of", "model", "on", "dl", "in", "evaluation", "mode", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L50-L67
[ "def", "validate", "(", "model", ":", "nn", ".", "Module", ",", "dl", ":", "DataLoader", ",", "loss_func", ":", "OptLossFunc", "=", "None", ",", "cb_handler", ":", "Optional", "[", "CallbackHandler", "]", "=", "None", ",", "pbar", ":", "Optional", "[", "PBar", "]", "=", "None", ",", "average", "=", "True", ",", "n_batch", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Iterator", "[", "Tuple", "[", "Union", "[", "Tensor", ",", "int", "]", ",", "...", "]", "]", ":", "model", ".", "eval", "(", ")", "with", "torch", ".", "no_grad", "(", ")", ":", "val_losses", ",", "nums", "=", "[", "]", ",", "[", "]", "if", "cb_handler", ":", "cb_handler", ".", "set_dl", "(", "dl", ")", "for", "xb", ",", "yb", "in", "progress_bar", "(", "dl", ",", "parent", "=", "pbar", ",", "leave", "=", "(", "pbar", "is", "not", "None", ")", ")", ":", "if", "cb_handler", ":", "xb", ",", "yb", "=", "cb_handler", ".", "on_batch_begin", "(", "xb", ",", "yb", ",", "train", "=", "False", ")", "val_loss", "=", "loss_batch", "(", "model", ",", "xb", ",", "yb", ",", "loss_func", ",", "cb_handler", "=", "cb_handler", ")", "val_losses", ".", "append", "(", "val_loss", ")", "if", "not", "is_listy", "(", "yb", ")", ":", "yb", "=", "[", "yb", "]", "nums", ".", "append", "(", "yb", "[", "0", "]", ".", "shape", "[", "0", "]", ")", "if", "cb_handler", "and", "cb_handler", ".", "on_batch_end", "(", "val_losses", "[", "-", "1", "]", ")", ":", "break", "if", "n_batch", "and", "(", "len", "(", "nums", ")", ">=", "n_batch", ")", ":", "break", "nums", "=", "np", ".", "array", "(", "nums", ",", "dtype", "=", "np", ".", "float32", ")", "if", "average", ":", "return", "(", "to_np", "(", "torch", ".", "stack", "(", "val_losses", ")", ")", "*", "nums", ")", ".", "sum", "(", ")", "/", "nums", ".", "sum", "(", ")", "else", ":", "return", "val_losses" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
train_epoch
Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`.
fastai/basic_train.py
def train_epoch(model:nn.Module, dl:DataLoader, opt:optim.Optimizer, loss_func:LossFunction)->None:
    "Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`."
    model.train()
    for xb, yb in dl:
        preds = model(xb)
        batch_loss = loss_func(preds, yb)
        # Standard step: backprop, update weights, then clear grads for the next batch.
        batch_loss.backward()
        opt.step()
        opt.zero_grad()
[ "Simple", "training", "of", "model", "for", "1", "epoch", "of", "dl", "using", "optim", "opt", "and", "loss", "function", "loss_func", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L69-L76
[ "def", "train_epoch", "(", "model", ":", "nn", ".", "Module", ",", "dl", ":", "DataLoader", ",", "opt", ":", "optim", ".", "Optimizer", ",", "loss_func", ":", "LossFunction", ")", "->", "None", ":", "model", ".", "train", "(", ")", "for", "xb", ",", "yb", "in", "dl", ":", "loss", "=", "loss_func", "(", "model", "(", "xb", ")", ",", "yb", ")", "loss", ".", "backward", "(", ")", "opt", ".", "step", "(", ")", "opt", ".", "zero_grad", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
fit
Fit the `model` on `data` and learn using `loss_func` and `opt`.
fastai/basic_train.py
def fit(epochs:int, learn:BasicLearner, callbacks:Optional[CallbackList]=None, metrics:OptMetrics=None)->None:
    "Fit the `model` on `data` and learn using `loss_func` and `opt`."
    assert len(learn.data.train_dl) != 0, f"""Your training dataloader is empty, can't train a model.
        Use a smaller batch size (batch size={learn.data.train_dl.batch_size} for {len(learn.data.train_dl.dataset)} elements)."""
    cb_handler = CallbackHandler(callbacks, metrics)
    pbar = master_bar(range(epochs))
    cb_handler.on_train_begin(epochs, pbar=pbar, metrics=metrics)
    exception = False
    try:
        for epoch in pbar:
            learn.model.train()
            cb_handler.set_dl(learn.data.train_dl)
            cb_handler.on_epoch_begin()
            for xb, yb in progress_bar(learn.data.train_dl, parent=pbar):
                xb, yb = cb_handler.on_batch_begin(xb, yb)
                loss = loss_batch(learn.model, xb, yb, learn.loss_func, learn.opt, cb_handler)
                # A truthy on_batch_end ends the epoch early.
                if cb_handler.on_batch_end(loss): break
            # Skip validation when a callback asked for it or there is no valid set.
            if cb_handler.skip_validate or learn.data.empty_val:
                val_loss = None
            else:
                val_loss = validate(learn.model, learn.data.valid_dl, loss_func=learn.loss_func,
                                    cb_handler=cb_handler, pbar=pbar)
            # A truthy on_epoch_end stops training altogether.
            if cb_handler.on_epoch_end(val_loss): break
    except Exception as e:
        # Remember the exception so on_train_end sees it, then re-raise.
        exception = e
        raise
    finally:
        cb_handler.on_train_end(exception)
[ "Fit", "the", "model", "on", "data", "and", "learn", "using", "loss_func", "and", "opt", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L85-L112
[ "def", "fit", "(", "epochs", ":", "int", ",", "learn", ":", "BasicLearner", ",", "callbacks", ":", "Optional", "[", "CallbackList", "]", "=", "None", ",", "metrics", ":", "OptMetrics", "=", "None", ")", "->", "None", ":", "assert", "len", "(", "learn", ".", "data", ".", "train_dl", ")", "!=", "0", ",", "f\"\"\"Your training dataloader is empty, can't train a model.\n Use a smaller batch size (batch size={learn.data.train_dl.batch_size} for {len(learn.data.train_dl.dataset)} elements).\"\"\"", "cb_handler", "=", "CallbackHandler", "(", "callbacks", ",", "metrics", ")", "pbar", "=", "master_bar", "(", "range", "(", "epochs", ")", ")", "cb_handler", ".", "on_train_begin", "(", "epochs", ",", "pbar", "=", "pbar", ",", "metrics", "=", "metrics", ")", "exception", "=", "False", "try", ":", "for", "epoch", "in", "pbar", ":", "learn", ".", "model", ".", "train", "(", ")", "cb_handler", ".", "set_dl", "(", "learn", ".", "data", ".", "train_dl", ")", "cb_handler", ".", "on_epoch_begin", "(", ")", "for", "xb", ",", "yb", "in", "progress_bar", "(", "learn", ".", "data", ".", "train_dl", ",", "parent", "=", "pbar", ")", ":", "xb", ",", "yb", "=", "cb_handler", ".", "on_batch_begin", "(", "xb", ",", "yb", ")", "loss", "=", "loss_batch", "(", "learn", ".", "model", ",", "xb", ",", "yb", ",", "learn", ".", "loss_func", ",", "learn", ".", "opt", ",", "cb_handler", ")", "if", "cb_handler", ".", "on_batch_end", "(", "loss", ")", ":", "break", "if", "not", "cb_handler", ".", "skip_validate", "and", "not", "learn", ".", "data", ".", "empty_val", ":", "val_loss", "=", "validate", "(", "learn", ".", "model", ",", "learn", ".", "data", ".", "valid_dl", ",", "loss_func", "=", "learn", ".", "loss_func", ",", "cb_handler", "=", "cb_handler", ",", "pbar", "=", "pbar", ")", "else", ":", "val_loss", "=", "None", "if", "cb_handler", ".", "on_epoch_end", "(", "val_loss", ")", ":", "break", "except", "Exception", "as", "e", ":", "exception", "=", "e", "raise", "finally", ":", "cb_handler", ".", 
"on_train_end", "(", "exception", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
load_learner
Load a `Learner` object saved with `export_state` in `path/file` with empty data, optionally add `test` and load on `cpu`. `file` can be file-like (file or buffer)
fastai/basic_train.py
def load_learner(path:PathOrStr, file:PathLikeOrBinaryStream='export.pkl', test:ItemList=None, **db_kwargs):
    "Load a `Learner` object saved with `export_state` in `path/file` with empty data, optionally add `test` and load on `cpu`. `file` can be file-like (file or buffer)"
    source = Path(path)/file if is_pathlike(file) else file
    # Map tensors onto the CPU when that is the default device.
    if defaults.device == torch.device('cpu'): state = torch.load(source, map_location='cpu')
    else:                                      state = torch.load(source)
    model = state.pop('model')
    src = LabelLists.load_state(path, state.pop('data'))
    if test is not None: src.add_test(test)
    data = src.databunch(**db_kwargs)
    cb_state  = state.pop('cb_state')
    clas_func = state.pop('cls')
    # Rebuild the Learner with whatever kwargs remain in the saved state.
    res = clas_func(data, model, **state)
    res.callback_fns = state['callback_fns'] #to avoid duplicates
    res.callbacks = [load_callback(c, s, res) for c, s in cb_state.items()]
    return res
[ "Load", "a", "Learner", "object", "saved", "with", "export_state", "in", "path", "/", "file", "with", "empty", "data", "optionally", "add", "test", "and", "load", "on", "cpu", ".", "file", "can", "be", "file", "-", "like", "(", "file", "or", "buffer", ")" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L596-L609
[ "def", "load_learner", "(", "path", ":", "PathOrStr", ",", "file", ":", "PathLikeOrBinaryStream", "=", "'export.pkl'", ",", "test", ":", "ItemList", "=", "None", ",", "*", "*", "db_kwargs", ")", ":", "source", "=", "Path", "(", "path", ")", "/", "file", "if", "is_pathlike", "(", "file", ")", "else", "file", "state", "=", "torch", ".", "load", "(", "source", ",", "map_location", "=", "'cpu'", ")", "if", "defaults", ".", "device", "==", "torch", ".", "device", "(", "'cpu'", ")", "else", "torch", ".", "load", "(", "source", ")", "model", "=", "state", ".", "pop", "(", "'model'", ")", "src", "=", "LabelLists", ".", "load_state", "(", "path", ",", "state", ".", "pop", "(", "'data'", ")", ")", "if", "test", "is", "not", "None", ":", "src", ".", "add_test", "(", "test", ")", "data", "=", "src", ".", "databunch", "(", "*", "*", "db_kwargs", ")", "cb_state", "=", "state", ".", "pop", "(", "'cb_state'", ")", "clas_func", "=", "state", ".", "pop", "(", "'cls'", ")", "res", "=", "clas_func", "(", "data", ",", "model", ",", "*", "*", "state", ")", "res", ".", "callback_fns", "=", "state", "[", "'callback_fns'", "]", "#to avoid duplicates", "res", ".", "callbacks", "=", "[", "load_callback", "(", "c", ",", "s", ",", "res", ")", "for", "c", ",", "s", "in", "cb_state", ".", "items", "(", ")", "]", "return", "res" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.on_train_begin
Initialize recording status at beginning of training.
fastai/basic_train.py
def on_train_begin(self, pbar:PBar, metrics_names:Collection[str], **kwargs:Any)->None: "Initialize recording status at beginning of training." self.pbar = pbar self.names = ['epoch', 'train_loss'] if self.no_val else ['epoch', 'train_loss', 'valid_loss'] self.metrics_names = metrics_names self.names += self.metrics_names if hasattr(self, '_added_met_names'): self.names += self._added_met_names if self.add_time: self.names.append('time') if not self.silent: self.pbar.write(self.names, table=True) self.losses,self.val_losses,self.lrs,self.moms,self.metrics,self.nb_batches = [],[],[],[],[],[]
def on_train_begin(self, pbar:PBar, metrics_names:Collection[str], **kwargs:Any)->None: "Initialize recording status at beginning of training." self.pbar = pbar self.names = ['epoch', 'train_loss'] if self.no_val else ['epoch', 'train_loss', 'valid_loss'] self.metrics_names = metrics_names self.names += self.metrics_names if hasattr(self, '_added_met_names'): self.names += self._added_met_names if self.add_time: self.names.append('time') if not self.silent: self.pbar.write(self.names, table=True) self.losses,self.val_losses,self.lrs,self.moms,self.metrics,self.nb_batches = [],[],[],[],[],[]
[ "Initialize", "recording", "status", "at", "beginning", "of", "training", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L447-L456
[ "def", "on_train_begin", "(", "self", ",", "pbar", ":", "PBar", ",", "metrics_names", ":", "Collection", "[", "str", "]", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "pbar", "=", "pbar", "self", ".", "names", "=", "[", "'epoch'", ",", "'train_loss'", "]", "if", "self", ".", "no_val", "else", "[", "'epoch'", ",", "'train_loss'", ",", "'valid_loss'", "]", "self", ".", "metrics_names", "=", "metrics_names", "self", ".", "names", "+=", "self", ".", "metrics_names", "if", "hasattr", "(", "self", ",", "'_added_met_names'", ")", ":", "self", ".", "names", "+=", "self", ".", "_added_met_names", "if", "self", ".", "add_time", ":", "self", ".", "names", ".", "append", "(", "'time'", ")", "if", "not", "self", ".", "silent", ":", "self", ".", "pbar", ".", "write", "(", "self", ".", "names", ",", "table", "=", "True", ")", "self", ".", "losses", ",", "self", ".", "val_losses", ",", "self", ".", "lrs", ",", "self", ".", "moms", ",", "self", ".", "metrics", ",", "self", ".", "nb_batches", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.on_batch_begin
Record learning rate and momentum at beginning of batch.
fastai/basic_train.py
def on_batch_begin(self, train, **kwargs:Any)->None: "Record learning rate and momentum at beginning of batch." if train: self.lrs.append(self.opt.lr) self.moms.append(self.opt.mom)
def on_batch_begin(self, train, **kwargs:Any)->None: "Record learning rate and momentum at beginning of batch." if train: self.lrs.append(self.opt.lr) self.moms.append(self.opt.mom)
[ "Record", "learning", "rate", "and", "momentum", "at", "beginning", "of", "batch", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L461-L465
[ "def", "on_batch_begin", "(", "self", ",", "train", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "train", ":", "self", ".", "lrs", ".", "append", "(", "self", ".", "opt", ".", "lr", ")", "self", ".", "moms", ".", "append", "(", "self", ".", "opt", ".", "mom", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.on_backward_begin
Record the loss before any other callback has a chance to modify it.
fastai/basic_train.py
def on_backward_begin(self, smooth_loss:Tensor, **kwargs:Any)->None: "Record the loss before any other callback has a chance to modify it." self.losses.append(smooth_loss) if self.pbar is not None and hasattr(self.pbar,'child'): self.pbar.child.comment = f'{smooth_loss:.4f}'
def on_backward_begin(self, smooth_loss:Tensor, **kwargs:Any)->None: "Record the loss before any other callback has a chance to modify it." self.losses.append(smooth_loss) if self.pbar is not None and hasattr(self.pbar,'child'): self.pbar.child.comment = f'{smooth_loss:.4f}'
[ "Record", "the", "loss", "before", "any", "other", "callback", "has", "a", "chance", "to", "modify", "it", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L467-L471
[ "def", "on_backward_begin", "(", "self", ",", "smooth_loss", ":", "Tensor", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "losses", ".", "append", "(", "smooth_loss", ")", "if", "self", ".", "pbar", "is", "not", "None", "and", "hasattr", "(", "self", ".", "pbar", ",", "'child'", ")", ":", "self", ".", "pbar", ".", "child", ".", "comment", "=", "f'{smooth_loss:.4f}'" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.on_epoch_end
Save epoch info: num_batch, smooth_loss, metrics.
fastai/basic_train.py
def on_epoch_end(self, epoch:int, num_batch:int, smooth_loss:Tensor, last_metrics=MetricsList, **kwargs:Any)->bool: "Save epoch info: num_batch, smooth_loss, metrics." self.nb_batches.append(num_batch) if last_metrics is not None: self.val_losses.append(last_metrics[0]) else: last_metrics = [] if self.no_val else [None] if len(last_metrics) > 1: self.metrics.append(last_metrics[1:]) self.format_stats([epoch, smooth_loss] + last_metrics)
def on_epoch_end(self, epoch:int, num_batch:int, smooth_loss:Tensor, last_metrics=MetricsList, **kwargs:Any)->bool: "Save epoch info: num_batch, smooth_loss, metrics." self.nb_batches.append(num_batch) if last_metrics is not None: self.val_losses.append(last_metrics[0]) else: last_metrics = [] if self.no_val else [None] if len(last_metrics) > 1: self.metrics.append(last_metrics[1:]) self.format_stats([epoch, smooth_loss] + last_metrics)
[ "Save", "epoch", "info", ":", "num_batch", "smooth_loss", "metrics", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L473-L480
[ "def", "on_epoch_end", "(", "self", ",", "epoch", ":", "int", ",", "num_batch", ":", "int", ",", "smooth_loss", ":", "Tensor", ",", "last_metrics", "=", "MetricsList", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "bool", ":", "self", ".", "nb_batches", ".", "append", "(", "num_batch", ")", "if", "last_metrics", "is", "not", "None", ":", "self", ".", "val_losses", ".", "append", "(", "last_metrics", "[", "0", "]", ")", "else", ":", "last_metrics", "=", "[", "]", "if", "self", ".", "no_val", "else", "[", "None", "]", "if", "len", "(", "last_metrics", ")", ">", "1", ":", "self", ".", "metrics", ".", "append", "(", "last_metrics", "[", "1", ":", "]", ")", "self", ".", "format_stats", "(", "[", "epoch", ",", "smooth_loss", "]", "+", "last_metrics", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.format_stats
Format stats before printing.
fastai/basic_train.py
def format_stats(self, stats:TensorOrNumList)->None: "Format stats before printing." str_stats = [] for name,stat in zip(self.names,stats): str_stats.append('#na#' if stat is None else str(stat) if isinstance(stat, int) else f'{stat:.6f}') if self.add_time: str_stats.append(format_time(time() - self.start_epoch)) if not self.silent: self.pbar.write(str_stats, table=True)
def format_stats(self, stats:TensorOrNumList)->None: "Format stats before printing." str_stats = [] for name,stat in zip(self.names,stats): str_stats.append('#na#' if stat is None else str(stat) if isinstance(stat, int) else f'{stat:.6f}') if self.add_time: str_stats.append(format_time(time() - self.start_epoch)) if not self.silent: self.pbar.write(str_stats, table=True)
[ "Format", "stats", "before", "printing", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L482-L488
[ "def", "format_stats", "(", "self", ",", "stats", ":", "TensorOrNumList", ")", "->", "None", ":", "str_stats", "=", "[", "]", "for", "name", ",", "stat", "in", "zip", "(", "self", ".", "names", ",", "stats", ")", ":", "str_stats", ".", "append", "(", "'#na#'", "if", "stat", "is", "None", "else", "str", "(", "stat", ")", "if", "isinstance", "(", "stat", ",", "int", ")", "else", "f'{stat:.6f}'", ")", "if", "self", ".", "add_time", ":", "str_stats", ".", "append", "(", "format_time", "(", "time", "(", ")", "-", "self", ".", "start_epoch", ")", ")", "if", "not", "self", ".", "silent", ":", "self", ".", "pbar", ".", "write", "(", "str_stats", ",", "table", "=", "True", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.add_metric_names
Add `names` to the inner metric names.
fastai/basic_train.py
def add_metric_names(self, names): "Add `names` to the inner metric names." if hasattr(self, '_added_met_names'): self._added_met_names += names else: self._added_met_names = names
def add_metric_names(self, names): "Add `names` to the inner metric names." if hasattr(self, '_added_met_names'): self._added_met_names += names else: self._added_met_names = names
[ "Add", "names", "to", "the", "inner", "metric", "names", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L490-L493
[ "def", "add_metric_names", "(", "self", ",", "names", ")", ":", "if", "hasattr", "(", "self", ",", "'_added_met_names'", ")", ":", "self", ".", "_added_met_names", "+=", "names", "else", ":", "self", ".", "_added_met_names", "=", "names" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.plot_lr
Plot learning rate, `show_moms` to include momentum.
fastai/basic_train.py
def plot_lr(self, show_moms=False, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot learning rate, `show_moms` to include momentum." lrs = self._split_list(self.lrs, skip_start, skip_end) iterations = self._split_list(range_of(self.lrs), skip_start, skip_end) if show_moms: moms = self._split_list(self.moms, skip_start, skip_end) fig, axs = plt.subplots(1,2, figsize=(12,4)) axs[0].plot(iterations, lrs) axs[0].set_xlabel('Iterations') axs[0].set_ylabel('Learning Rate') axs[1].plot(iterations, moms) axs[1].set_xlabel('Iterations') axs[1].set_ylabel('Momentum') else: fig, ax = plt.subplots() ax.plot(iterations, lrs) ax.set_xlabel('Iterations') ax.set_ylabel('Learning Rate') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
def plot_lr(self, show_moms=False, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot learning rate, `show_moms` to include momentum." lrs = self._split_list(self.lrs, skip_start, skip_end) iterations = self._split_list(range_of(self.lrs), skip_start, skip_end) if show_moms: moms = self._split_list(self.moms, skip_start, skip_end) fig, axs = plt.subplots(1,2, figsize=(12,4)) axs[0].plot(iterations, lrs) axs[0].set_xlabel('Iterations') axs[0].set_ylabel('Learning Rate') axs[1].plot(iterations, moms) axs[1].set_xlabel('Iterations') axs[1].set_ylabel('Momentum') else: fig, ax = plt.subplots() ax.plot(iterations, lrs) ax.set_xlabel('Iterations') ax.set_ylabel('Learning Rate') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "Plot", "learning", "rate", "show_moms", "to", "include", "momentum", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L495-L514
[ "def", "plot_lr", "(", "self", ",", "show_moms", "=", "False", ",", "skip_start", ":", "int", "=", "0", ",", "skip_end", ":", "int", "=", "0", ",", "return_fig", ":", "bool", "=", "None", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "lrs", "=", "self", ".", "_split_list", "(", "self", ".", "lrs", ",", "skip_start", ",", "skip_end", ")", "iterations", "=", "self", ".", "_split_list", "(", "range_of", "(", "self", ".", "lrs", ")", ",", "skip_start", ",", "skip_end", ")", "if", "show_moms", ":", "moms", "=", "self", ".", "_split_list", "(", "self", ".", "moms", ",", "skip_start", ",", "skip_end", ")", "fig", ",", "axs", "=", "plt", ".", "subplots", "(", "1", ",", "2", ",", "figsize", "=", "(", "12", ",", "4", ")", ")", "axs", "[", "0", "]", ".", "plot", "(", "iterations", ",", "lrs", ")", "axs", "[", "0", "]", ".", "set_xlabel", "(", "'Iterations'", ")", "axs", "[", "0", "]", ".", "set_ylabel", "(", "'Learning Rate'", ")", "axs", "[", "1", "]", ".", "plot", "(", "iterations", ",", "moms", ")", "axs", "[", "1", "]", ".", "set_xlabel", "(", "'Iterations'", ")", "axs", "[", "1", "]", ".", "set_ylabel", "(", "'Momentum'", ")", "else", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "ax", ".", "plot", "(", "iterations", ",", "lrs", ")", "ax", ".", "set_xlabel", "(", "'Iterations'", ")", "ax", ".", "set_ylabel", "(", "'Learning Rate'", ")", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.plot
Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient
fastai/basic_train.py
def plot(self, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None, **kwargs)->Optional[plt.Figure]: "Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient" lrs = self._split_list(self.lrs, skip_start, skip_end) losses = self._split_list(self.losses, skip_start, skip_end) losses = [x.item() for x in losses] if 'k' in kwargs: losses = self.smoothen_by_spline(lrs, losses, **kwargs) fig, ax = plt.subplots(1,1) ax.plot(lrs, losses) ax.set_ylabel("Loss") ax.set_xlabel("Learning Rate") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print("Failed to compute the gradients, there might not be enough points.") return print(f"Min numerical gradient: {lrs[mg]:.2E}") ax.plot(lrs[mg],losses[mg],markersize=10,marker='o',color='red') self.min_grad_lr = lrs[mg] if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
def plot(self, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None, **kwargs)->Optional[plt.Figure]: "Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient" lrs = self._split_list(self.lrs, skip_start, skip_end) losses = self._split_list(self.losses, skip_start, skip_end) losses = [x.item() for x in losses] if 'k' in kwargs: losses = self.smoothen_by_spline(lrs, losses, **kwargs) fig, ax = plt.subplots(1,1) ax.plot(lrs, losses) ax.set_ylabel("Loss") ax.set_xlabel("Learning Rate") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print("Failed to compute the gradients, there might not be enough points.") return print(f"Min numerical gradient: {lrs[mg]:.2E}") ax.plot(lrs[mg],losses[mg],markersize=10,marker='o',color='red') self.min_grad_lr = lrs[mg] if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "Plot", "learning", "rate", "and", "losses", "trimmed", "between", "skip_start", "and", "skip_end", ".", "Optionally", "plot", "and", "return", "min", "gradient" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L523-L545
[ "def", "plot", "(", "self", ",", "skip_start", ":", "int", "=", "10", ",", "skip_end", ":", "int", "=", "5", ",", "suggestion", ":", "bool", "=", "False", ",", "return_fig", ":", "bool", "=", "None", ",", "*", "*", "kwargs", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "lrs", "=", "self", ".", "_split_list", "(", "self", ".", "lrs", ",", "skip_start", ",", "skip_end", ")", "losses", "=", "self", ".", "_split_list", "(", "self", ".", "losses", ",", "skip_start", ",", "skip_end", ")", "losses", "=", "[", "x", ".", "item", "(", ")", "for", "x", "in", "losses", "]", "if", "'k'", "in", "kwargs", ":", "losses", "=", "self", ".", "smoothen_by_spline", "(", "lrs", ",", "losses", ",", "*", "*", "kwargs", ")", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "1", ",", "1", ")", "ax", ".", "plot", "(", "lrs", ",", "losses", ")", "ax", ".", "set_ylabel", "(", "\"Loss\"", ")", "ax", ".", "set_xlabel", "(", "\"Learning Rate\"", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "xaxis", ".", "set_major_formatter", "(", "plt", ".", "FormatStrFormatter", "(", "'%.0e'", ")", ")", "if", "suggestion", ":", "try", ":", "mg", "=", "(", "np", ".", "gradient", "(", "np", ".", "array", "(", "losses", ")", ")", ")", ".", "argmin", "(", ")", "except", ":", "print", "(", "\"Failed to compute the gradients, there might not be enough points.\"", ")", "return", "print", "(", "f\"Min numerical gradient: {lrs[mg]:.2E}\"", ")", "ax", ".", "plot", "(", "lrs", "[", "mg", "]", ",", "losses", "[", "mg", "]", ",", "markersize", "=", "10", ",", "marker", "=", "'o'", ",", "color", "=", "'red'", ")", "self", ".", "min_grad_lr", "=", "lrs", "[", "mg", "]", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.plot_losses
Plot training and validation losses.
fastai/basic_train.py
def plot_losses(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot training and validation losses." fig, ax = plt.subplots(1,1) losses = self._split_list(self.losses, skip_start, skip_end) iterations = self._split_list(range_of(self.losses), skip_start, skip_end) ax.plot(iterations, losses, label='Train') val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) val_losses = self._split_list_val(self.val_losses, skip_start, skip_end) ax.plot(val_iter, val_losses, label='Validation') ax.set_ylabel('Loss') ax.set_xlabel('Batches processed') ax.legend() if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
def plot_losses(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot training and validation losses." fig, ax = plt.subplots(1,1) losses = self._split_list(self.losses, skip_start, skip_end) iterations = self._split_list(range_of(self.losses), skip_start, skip_end) ax.plot(iterations, losses, label='Train') val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) val_losses = self._split_list_val(self.val_losses, skip_start, skip_end) ax.plot(val_iter, val_losses, label='Validation') ax.set_ylabel('Loss') ax.set_xlabel('Batches processed') ax.legend() if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "Plot", "training", "and", "validation", "losses", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L547-L560
[ "def", "plot_losses", "(", "self", ",", "skip_start", ":", "int", "=", "0", ",", "skip_end", ":", "int", "=", "0", ",", "return_fig", ":", "bool", "=", "None", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "1", ",", "1", ")", "losses", "=", "self", ".", "_split_list", "(", "self", ".", "losses", ",", "skip_start", ",", "skip_end", ")", "iterations", "=", "self", ".", "_split_list", "(", "range_of", "(", "self", ".", "losses", ")", ",", "skip_start", ",", "skip_end", ")", "ax", ".", "plot", "(", "iterations", ",", "losses", ",", "label", "=", "'Train'", ")", "val_iter", "=", "self", ".", "_split_list_val", "(", "np", ".", "cumsum", "(", "self", ".", "nb_batches", ")", ",", "skip_start", ",", "skip_end", ")", "val_losses", "=", "self", ".", "_split_list_val", "(", "self", ".", "val_losses", ",", "skip_start", ",", "skip_end", ")", "ax", ".", "plot", "(", "val_iter", ",", "val_losses", ",", "label", "=", "'Validation'", ")", "ax", ".", "set_ylabel", "(", "'Loss'", ")", "ax", ".", "set_xlabel", "(", "'Batches processed'", ")", "ax", ".", "legend", "(", ")", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
Recorder.plot_metrics
Plot metrics collected during training.
fastai/basic_train.py
def plot_metrics(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot metrics collected during training." assert len(self.metrics) != 0, "There are no metrics to plot." fig, axes = plt.subplots(len(self.metrics[0]),1,figsize=(6, 4*len(self.metrics[0]))) val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) axes = axes.flatten() if len(self.metrics[0]) != 1 else [axes] for i, ax in enumerate(axes): values = [met[i] for met in self.metrics] values = self._split_list_val(values, skip_start, skip_end) ax.plot(val_iter, values) ax.set_ylabel(str(self.metrics_names[i])) ax.set_xlabel('Batches processed') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
def plot_metrics(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot metrics collected during training." assert len(self.metrics) != 0, "There are no metrics to plot." fig, axes = plt.subplots(len(self.metrics[0]),1,figsize=(6, 4*len(self.metrics[0]))) val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) axes = axes.flatten() if len(self.metrics[0]) != 1 else [axes] for i, ax in enumerate(axes): values = [met[i] for met in self.metrics] values = self._split_list_val(values, skip_start, skip_end) ax.plot(val_iter, values) ax.set_ylabel(str(self.metrics_names[i])) ax.set_xlabel('Batches processed') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "Plot", "metrics", "collected", "during", "training", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L562-L575
[ "def", "plot_metrics", "(", "self", ",", "skip_start", ":", "int", "=", "0", ",", "skip_end", ":", "int", "=", "0", ",", "return_fig", ":", "bool", "=", "None", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "assert", "len", "(", "self", ".", "metrics", ")", "!=", "0", ",", "\"There are no metrics to plot.\"", "fig", ",", "axes", "=", "plt", ".", "subplots", "(", "len", "(", "self", ".", "metrics", "[", "0", "]", ")", ",", "1", ",", "figsize", "=", "(", "6", ",", "4", "*", "len", "(", "self", ".", "metrics", "[", "0", "]", ")", ")", ")", "val_iter", "=", "self", ".", "_split_list_val", "(", "np", ".", "cumsum", "(", "self", ".", "nb_batches", ")", ",", "skip_start", ",", "skip_end", ")", "axes", "=", "axes", ".", "flatten", "(", ")", "if", "len", "(", "self", ".", "metrics", "[", "0", "]", ")", "!=", "1", "else", "[", "axes", "]", "for", "i", ",", "ax", "in", "enumerate", "(", "axes", ")", ":", "values", "=", "[", "met", "[", "i", "]", "for", "met", "in", "self", ".", "metrics", "]", "values", "=", "self", ".", "_split_list_val", "(", "values", ",", "skip_start", ",", "skip_end", ")", "ax", ".", "plot", "(", "val_iter", ",", "values", ")", "ax", ".", "set_ylabel", "(", "str", "(", "self", ".", "metrics_names", "[", "i", "]", ")", ")", "ax", ".", "set_xlabel", "(", "'Batches processed'", ")", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
anno_parser
Look at params (annotated with `Param`) in func and return an `ArgumentParser`
fastai/script.py
def anno_parser(func): "Look at params (annotated with `Param`) in func and return an `ArgumentParser`" p = ArgumentParser(description=func.__doc__) for k,v in inspect.signature(func).parameters.items(): param = func.__annotations__.get(k, Param()) kwargs = param.kwargs if v.default != inspect.Parameter.empty: kwargs['default'] = v.default p.add_argument(f"{param.pre}{k}", **kwargs) return p
def anno_parser(func): "Look at params (annotated with `Param`) in func and return an `ArgumentParser`" p = ArgumentParser(description=func.__doc__) for k,v in inspect.signature(func).parameters.items(): param = func.__annotations__.get(k, Param()) kwargs = param.kwargs if v.default != inspect.Parameter.empty: kwargs['default'] = v.default p.add_argument(f"{param.pre}{k}", **kwargs) return p
[ "Look", "at", "params", "(", "annotated", "with", "Param", ")", "in", "func", "and", "return", "an", "ArgumentParser" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/script.py#L25-L33
[ "def", "anno_parser", "(", "func", ")", ":", "p", "=", "ArgumentParser", "(", "description", "=", "func", ".", "__doc__", ")", "for", "k", ",", "v", "in", "inspect", ".", "signature", "(", "func", ")", ".", "parameters", ".", "items", "(", ")", ":", "param", "=", "func", ".", "__annotations__", ".", "get", "(", "k", ",", "Param", "(", ")", ")", "kwargs", "=", "param", ".", "kwargs", "if", "v", ".", "default", "!=", "inspect", ".", "Parameter", ".", "empty", ":", "kwargs", "[", "'default'", "]", "=", "v", ".", "default", "p", ".", "add_argument", "(", "f\"{param.pre}{k}\"", ",", "*", "*", "kwargs", ")", "return", "p" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
call_parse
Decorator to create a simple CLI from `func` using `anno_parser`
fastai/script.py
def call_parse(func): "Decorator to create a simple CLI from `func` using `anno_parser`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == "__main__": args = anno_parser(func).parse_args() func(**args.__dict__) else: return func
def call_parse(func): "Decorator to create a simple CLI from `func` using `anno_parser`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == "__main__": args = anno_parser(func).parse_args() func(**args.__dict__) else: return func
[ "Decorator", "to", "create", "a", "simple", "CLI", "from", "func", "using", "anno_parser" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/script.py#L35-L41
[ "def", "call_parse", "(", "func", ")", ":", "name", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", ".", "f_globals", "[", "'__name__'", "]", "if", "name", "==", "\"__main__\"", ":", "args", "=", "anno_parser", "(", "func", ")", ".", "parse_args", "(", ")", "func", "(", "*", "*", "args", ".", "__dict__", ")", "else", ":", "return", "func" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
call_plac
Decorator to create a simple CLI from `func` using `plac`
fastai/script.py
def call_plac(f): "Decorator to create a simple CLI from `func` using `plac`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == '__main__': import plac res = plac.call(f) if callable(res): res() else: return f
def call_plac(f): "Decorator to create a simple CLI from `func` using `plac`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == '__main__': import plac res = plac.call(f) if callable(res): res() else: return f
[ "Decorator", "to", "create", "a", "simple", "CLI", "from", "func", "using", "plac" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/script.py#L43-L50
[ "def", "call_plac", "(", "f", ")", ":", "name", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", ".", "f_globals", "[", "'__name__'", "]", "if", "name", "==", "'__main__'", ":", "import", "plac", "res", "=", "plac", ".", "call", "(", "f", ")", "if", "callable", "(", "res", ")", ":", "res", "(", ")", "else", ":", "return", "f" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
numericalize_tok
Takes in text tokens and returns int2tok and tok2int converters Arguments: tokens(list): List of tokens. Can be a list of strings, or a list of lists of strings. max_vocab(int): Number of tokens to return in the vocab (sorted by frequency) min_freq(int): Minimum number of instances a token must be present in order to be preserved. unk_tok(str): Token to use when unknown tokens are encountered in the source text. pad_tok(str): Token to use when padding sequences.
old/fastai/text.py
def numericalize_tok(tokens, max_vocab=50000, min_freq=0, unk_tok="_unk_", pad_tok="_pad_", bos_tok="_bos_", eos_tok="_eos_"): """Takes in text tokens and returns int2tok and tok2int converters Arguments: tokens(list): List of tokens. Can be a list of strings, or a list of lists of strings. max_vocab(int): Number of tokens to return in the vocab (sorted by frequency) min_freq(int): Minimum number of instances a token must be present in order to be preserved. unk_tok(str): Token to use when unknown tokens are encountered in the source text. pad_tok(str): Token to use when padding sequences. """ if isinstance(tokens, str): raise ValueError("Expected to receive a list of tokens. Received a string instead") if isinstance(tokens[0], list): tokens = [p for o in tokens for p in o] freq = Counter(tokens) int2tok = [o for o,c in freq.most_common(max_vocab) if c>min_freq] unk_id = 3 int2tok.insert(0, bos_tok) int2tok.insert(1, pad_tok) int2tok.insert(2, eos_tok) int2tok.insert(unk_id, unk_tok) tok2int = collections.defaultdict(lambda:unk_id, {v:k for k,v in enumerate(int2tok)}) return int2tok, tok2int
def numericalize_tok(tokens, max_vocab=50000, min_freq=0, unk_tok="_unk_", pad_tok="_pad_", bos_tok="_bos_", eos_tok="_eos_"): """Takes in text tokens and returns int2tok and tok2int converters Arguments: tokens(list): List of tokens. Can be a list of strings, or a list of lists of strings. max_vocab(int): Number of tokens to return in the vocab (sorted by frequency) min_freq(int): Minimum number of instances a token must be present in order to be preserved. unk_tok(str): Token to use when unknown tokens are encountered in the source text. pad_tok(str): Token to use when padding sequences. """ if isinstance(tokens, str): raise ValueError("Expected to receive a list of tokens. Received a string instead") if isinstance(tokens[0], list): tokens = [p for o in tokens for p in o] freq = Counter(tokens) int2tok = [o for o,c in freq.most_common(max_vocab) if c>min_freq] unk_id = 3 int2tok.insert(0, bos_tok) int2tok.insert(1, pad_tok) int2tok.insert(2, eos_tok) int2tok.insert(unk_id, unk_tok) tok2int = collections.defaultdict(lambda:unk_id, {v:k for k,v in enumerate(int2tok)}) return int2tok, tok2int
[ "Takes", "in", "text", "tokens", "and", "returns", "int2tok", "and", "tok2int", "converters" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/text.py#L19-L41
[ "def", "numericalize_tok", "(", "tokens", ",", "max_vocab", "=", "50000", ",", "min_freq", "=", "0", ",", "unk_tok", "=", "\"_unk_\"", ",", "pad_tok", "=", "\"_pad_\"", ",", "bos_tok", "=", "\"_bos_\"", ",", "eos_tok", "=", "\"_eos_\"", ")", ":", "if", "isinstance", "(", "tokens", ",", "str", ")", ":", "raise", "ValueError", "(", "\"Expected to receive a list of tokens. Received a string instead\"", ")", "if", "isinstance", "(", "tokens", "[", "0", "]", ",", "list", ")", ":", "tokens", "=", "[", "p", "for", "o", "in", "tokens", "for", "p", "in", "o", "]", "freq", "=", "Counter", "(", "tokens", ")", "int2tok", "=", "[", "o", "for", "o", ",", "c", "in", "freq", ".", "most_common", "(", "max_vocab", ")", "if", "c", ">", "min_freq", "]", "unk_id", "=", "3", "int2tok", ".", "insert", "(", "0", ",", "bos_tok", ")", "int2tok", ".", "insert", "(", "1", ",", "pad_tok", ")", "int2tok", ".", "insert", "(", "2", ",", "eos_tok", ")", "int2tok", ".", "insert", "(", "unk_id", ",", "unk_tok", ")", "tok2int", "=", "collections", ".", "defaultdict", "(", "lambda", ":", "unk_id", ",", "{", "v", ":", "k", "for", "k", ",", "v", "in", "enumerate", "(", "int2tok", ")", "}", ")", "return", "int2tok", ",", "tok2int" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
QRNN.reset
If your convolutional window is greater than 1 and you save previous xs, you must reset at the beginning of each new sequence.
fastai/text/models/qrnn.py
def reset(self): "If your convolutional window is greater than 1 and you save previous xs, you must reset at the beginning of each new sequence." for layer in self.layers: layer.reset() if self.bidirectional: for layer in self.layers_bwd: layer.reset()
def reset(self): "If your convolutional window is greater than 1 and you save previous xs, you must reset at the beginning of each new sequence." for layer in self.layers: layer.reset() if self.bidirectional: for layer in self.layers_bwd: layer.reset()
[ "If", "your", "convolutional", "window", "is", "greater", "than", "1", "and", "you", "save", "previous", "xs", "you", "must", "reset", "at", "the", "beginning", "of", "each", "new", "sequence", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/models/qrnn.py#L148-L152
[ "def", "reset", "(", "self", ")", ":", "for", "layer", "in", "self", ".", "layers", ":", "layer", ".", "reset", "(", ")", "if", "self", ".", "bidirectional", ":", "for", "layer", "in", "self", ".", "layers_bwd", ":", "layer", ".", "reset", "(", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
start_new_kernel
Start a new kernel, and return its Manager and Client
docs_src/nbval/kernel.py
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs): """Start a new kernel, and return its Manager and Client""" logger.debug('Starting new kernel: "%s"' % kernel_name) km = KernelManager(kernel_name=kernel_name, kernel_spec_manager=NbvalKernelspecManager()) km.start_kernel(**kwargs) kc = km.client() kc.start_channels() try: kc.wait_for_ready(timeout=startup_timeout) except RuntimeError: logger.exception('Failure starting kernel "%s"', kernel_name) kc.stop_channels() km.shutdown_kernel() raise return km, kc
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs): """Start a new kernel, and return its Manager and Client""" logger.debug('Starting new kernel: "%s"' % kernel_name) km = KernelManager(kernel_name=kernel_name, kernel_spec_manager=NbvalKernelspecManager()) km.start_kernel(**kwargs) kc = km.client() kc.start_channels() try: kc.wait_for_ready(timeout=startup_timeout) except RuntimeError: logger.exception('Failure starting kernel "%s"', kernel_name) kc.stop_channels() km.shutdown_kernel() raise return km, kc
[ "Start", "a", "new", "kernel", "and", "return", "its", "Manager", "and", "Client" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L48-L64
[ "def", "start_new_kernel", "(", "startup_timeout", "=", "60", ",", "kernel_name", "=", "'python'", ",", "*", "*", "kwargs", ")", ":", "logger", ".", "debug", "(", "'Starting new kernel: \"%s\"'", "%", "kernel_name", ")", "km", "=", "KernelManager", "(", "kernel_name", "=", "kernel_name", ",", "kernel_spec_manager", "=", "NbvalKernelspecManager", "(", ")", ")", "km", ".", "start_kernel", "(", "*", "*", "kwargs", ")", "kc", "=", "km", ".", "client", "(", ")", "kc", ".", "start_channels", "(", ")", "try", ":", "kc", ".", "wait_for_ready", "(", "timeout", "=", "startup_timeout", ")", "except", "RuntimeError", ":", "logger", ".", "exception", "(", "'Failure starting kernel \"%s\"'", ",", "kernel_name", ")", "kc", ".", "stop_channels", "(", ")", "km", ".", "shutdown_kernel", "(", ")", "raise", "return", "km", ",", "kc" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
NbvalKernelspecManager.get_kernel_spec
Returns a :class:`KernelSpec` instance for the given kernel_name. Raises :exc:`NoSuchKernel` if the given kernel name is not found.
docs_src/nbval/kernel.py
def get_kernel_spec(self, kernel_name): """Returns a :class:`KernelSpec` instance for the given kernel_name. Raises :exc:`NoSuchKernel` if the given kernel name is not found. """ if kernel_name == CURRENT_ENV_KERNEL_NAME: return self.kernel_spec_class( resource_dir=ipykernel.kernelspec.RESOURCES, **ipykernel.kernelspec.get_kernel_dict()) else: return super(NbvalKernelspecManager, self).get_kernel_spec(kernel_name)
def get_kernel_spec(self, kernel_name): """Returns a :class:`KernelSpec` instance for the given kernel_name. Raises :exc:`NoSuchKernel` if the given kernel name is not found. """ if kernel_name == CURRENT_ENV_KERNEL_NAME: return self.kernel_spec_class( resource_dir=ipykernel.kernelspec.RESOURCES, **ipykernel.kernelspec.get_kernel_dict()) else: return super(NbvalKernelspecManager, self).get_kernel_spec(kernel_name)
[ "Returns", "a", ":", "class", ":", "KernelSpec", "instance", "for", "the", "given", "kernel_name", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L35-L45
[ "def", "get_kernel_spec", "(", "self", ",", "kernel_name", ")", ":", "if", "kernel_name", "==", "CURRENT_ENV_KERNEL_NAME", ":", "return", "self", ".", "kernel_spec_class", "(", "resource_dir", "=", "ipykernel", ".", "kernelspec", ".", "RESOURCES", ",", "*", "*", "ipykernel", ".", "kernelspec", ".", "get_kernel_dict", "(", ")", ")", "else", ":", "return", "super", "(", "NbvalKernelspecManager", ",", "self", ")", ".", "get_kernel_spec", "(", "kernel_name", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
RunningKernel.get_message
Function is used to get a message from the iopub channel. Timeout is None by default When timeout is reached
docs_src/nbval/kernel.py
def get_message(self, stream, timeout=None): """ Function is used to get a message from the iopub channel. Timeout is None by default When timeout is reached """ try: if stream == 'iopub': msg = self.kc.get_iopub_msg(timeout=timeout) elif stream == 'shell': msg = self.kc.get_shell_msg(timeout=timeout) else: raise ValueError('Invalid stream specified: "%s"' % stream) except Empty: logger.debug('Kernel: Timeout waiting for message on %s', stream) raise logger.debug("Kernel message (%s):\n%s", stream, pformat(msg)) return msg
def get_message(self, stream, timeout=None): """ Function is used to get a message from the iopub channel. Timeout is None by default When timeout is reached """ try: if stream == 'iopub': msg = self.kc.get_iopub_msg(timeout=timeout) elif stream == 'shell': msg = self.kc.get_shell_msg(timeout=timeout) else: raise ValueError('Invalid stream specified: "%s"' % stream) except Empty: logger.debug('Kernel: Timeout waiting for message on %s', stream) raise logger.debug("Kernel message (%s):\n%s", stream, pformat(msg)) return msg
[ "Function", "is", "used", "to", "get", "a", "message", "from", "the", "iopub", "channel", ".", "Timeout", "is", "None", "by", "default", "When", "timeout", "is", "reached" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L115-L132
[ "def", "get_message", "(", "self", ",", "stream", ",", "timeout", "=", "None", ")", ":", "try", ":", "if", "stream", "==", "'iopub'", ":", "msg", "=", "self", ".", "kc", ".", "get_iopub_msg", "(", "timeout", "=", "timeout", ")", "elif", "stream", "==", "'shell'", ":", "msg", "=", "self", ".", "kc", ".", "get_shell_msg", "(", "timeout", "=", "timeout", ")", "else", ":", "raise", "ValueError", "(", "'Invalid stream specified: \"%s\"'", "%", "stream", ")", "except", "Empty", ":", "logger", ".", "debug", "(", "'Kernel: Timeout waiting for message on %s'", ",", "stream", ")", "raise", "logger", ".", "debug", "(", "\"Kernel message (%s):\\n%s\"", ",", "stream", ",", "pformat", "(", "msg", ")", ")", "return", "msg" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
RunningKernel.execute_cell_input
Executes a string of python code in cell input. We do not allow the kernel to make requests to the stdin this is the norm for notebooks Function returns a unique message id of the reply from the kernel.
docs_src/nbval/kernel.py
def execute_cell_input(self, cell_input, allow_stdin=None): """ Executes a string of python code in cell input. We do not allow the kernel to make requests to the stdin this is the norm for notebooks Function returns a unique message id of the reply from the kernel. """ if cell_input: logger.debug('Executing cell: "%s"...', cell_input.splitlines()[0][:40]) else: logger.debug('Executing empty cell') return self.kc.execute(cell_input, allow_stdin=allow_stdin, stop_on_error=False)
def execute_cell_input(self, cell_input, allow_stdin=None): """ Executes a string of python code in cell input. We do not allow the kernel to make requests to the stdin this is the norm for notebooks Function returns a unique message id of the reply from the kernel. """ if cell_input: logger.debug('Executing cell: "%s"...', cell_input.splitlines()[0][:40]) else: logger.debug('Executing empty cell') return self.kc.execute(cell_input, allow_stdin=allow_stdin, stop_on_error=False)
[ "Executes", "a", "string", "of", "python", "code", "in", "cell", "input", ".", "We", "do", "not", "allow", "the", "kernel", "to", "make", "requests", "to", "the", "stdin", "this", "is", "the", "norm", "for", "notebooks" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L134-L147
[ "def", "execute_cell_input", "(", "self", ",", "cell_input", ",", "allow_stdin", "=", "None", ")", ":", "if", "cell_input", ":", "logger", ".", "debug", "(", "'Executing cell: \"%s\"...'", ",", "cell_input", ".", "splitlines", "(", ")", "[", "0", "]", "[", ":", "40", "]", ")", "else", ":", "logger", ".", "debug", "(", "'Executing empty cell'", ")", "return", "self", ".", "kc", ".", "execute", "(", "cell_input", ",", "allow_stdin", "=", "allow_stdin", ",", "stop_on_error", "=", "False", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
RunningKernel.await_reply
Continuously poll the kernel 'shell' stream for messages until: - It receives an 'execute_reply' status for the given message id - The timeout is reached awaiting a message, in which case a `Queue.Empty` exception will be raised.
docs_src/nbval/kernel.py
def await_reply(self, msg_id, timeout=None): """ Continuously poll the kernel 'shell' stream for messages until: - It receives an 'execute_reply' status for the given message id - The timeout is reached awaiting a message, in which case a `Queue.Empty` exception will be raised. """ while True: msg = self.get_message(stream='shell', timeout=timeout) # Is this the message we are waiting for? if msg['parent_header'].get('msg_id') == msg_id: if msg['content']['status'] == 'aborted': # This should not occur! raise RuntimeError('Kernel aborted execution request') return
def await_reply(self, msg_id, timeout=None): """ Continuously poll the kernel 'shell' stream for messages until: - It receives an 'execute_reply' status for the given message id - The timeout is reached awaiting a message, in which case a `Queue.Empty` exception will be raised. """ while True: msg = self.get_message(stream='shell', timeout=timeout) # Is this the message we are waiting for? if msg['parent_header'].get('msg_id') == msg_id: if msg['content']['status'] == 'aborted': # This should not occur! raise RuntimeError('Kernel aborted execution request') return
[ "Continuously", "poll", "the", "kernel", "shell", "stream", "for", "messages", "until", ":", "-", "It", "receives", "an", "execute_reply", "status", "for", "the", "given", "message", "id", "-", "The", "timeout", "is", "reached", "awaiting", "a", "message", "in", "which", "case", "a", "Queue", ".", "Empty", "exception", "will", "be", "raised", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L149-L164
[ "def", "await_reply", "(", "self", ",", "msg_id", ",", "timeout", "=", "None", ")", ":", "while", "True", ":", "msg", "=", "self", ".", "get_message", "(", "stream", "=", "'shell'", ",", "timeout", "=", "timeout", ")", "# Is this the message we are waiting for?", "if", "msg", "[", "'parent_header'", "]", ".", "get", "(", "'msg_id'", ")", "==", "msg_id", ":", "if", "msg", "[", "'content'", "]", "[", "'status'", "]", "==", "'aborted'", ":", "# This should not occur!", "raise", "RuntimeError", "(", "'Kernel aborted execution request'", ")", "return" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
RunningKernel.await_idle
Poll the iopub stream until an idle message is received for the given parent ID
docs_src/nbval/kernel.py
def await_idle(self, parent_id, timeout): """Poll the iopub stream until an idle message is received for the given parent ID""" while True: # Get a message from the kernel iopub channel msg = self.get_message(timeout=timeout, stream='iopub') # raises Empty on timeout! if msg['parent_header'].get('msg_id') != parent_id: continue if msg['msg_type'] == 'status': if msg['content']['execution_state'] == 'idle': break
def await_idle(self, parent_id, timeout): """Poll the iopub stream until an idle message is received for the given parent ID""" while True: # Get a message from the kernel iopub channel msg = self.get_message(timeout=timeout, stream='iopub') # raises Empty on timeout! if msg['parent_header'].get('msg_id') != parent_id: continue if msg['msg_type'] == 'status': if msg['content']['execution_state'] == 'idle': break
[ "Poll", "the", "iopub", "stream", "until", "an", "idle", "message", "is", "received", "for", "the", "given", "parent", "ID" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L166-L176
[ "def", "await_idle", "(", "self", ",", "parent_id", ",", "timeout", ")", ":", "while", "True", ":", "# Get a message from the kernel iopub channel", "msg", "=", "self", ".", "get_message", "(", "timeout", "=", "timeout", ",", "stream", "=", "'iopub'", ")", "# raises Empty on timeout!", "if", "msg", "[", "'parent_header'", "]", ".", "get", "(", "'msg_id'", ")", "!=", "parent_id", ":", "continue", "if", "msg", "[", "'msg_type'", "]", "==", "'status'", ":", "if", "msg", "[", "'content'", "]", "[", "'execution_state'", "]", "==", "'idle'", ":", "break" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
RunningKernel.stop
Instructs the kernel process to stop channels and the kernel manager to then shutdown the process.
docs_src/nbval/kernel.py
def stop(self): """ Instructs the kernel process to stop channels and the kernel manager to then shutdown the process. """ logger.debug('Stopping kernel') self.kc.stop_channels() self.km.shutdown_kernel(now=True) del self.km
def stop(self): """ Instructs the kernel process to stop channels and the kernel manager to then shutdown the process. """ logger.debug('Stopping kernel') self.kc.stop_channels() self.km.shutdown_kernel(now=True) del self.km
[ "Instructs", "the", "kernel", "process", "to", "stop", "channels", "and", "the", "kernel", "manager", "to", "then", "shutdown", "the", "process", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/kernel.py#L200-L208
[ "def", "stop", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Stopping kernel'", ")", "self", ".", "kc", ".", "stop_channels", "(", ")", "self", ".", "km", ".", "shutdown_kernel", "(", "now", "=", "True", ")", "del", "self", ".", "km" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
get_cv_idxs
Get a list of index values for Validation set from a dataset Arguments: n : int, Total number of elements in the data set. cv_idx : int, starting index [idx_start = cv_idx*int(val_pct*n)] val_pct : (int, float), validation set percentage seed : seed value for RandomState Returns: list of indexes
old/fastai/dataset.py
def get_cv_idxs(n, cv_idx=0, val_pct=0.2, seed=42): """ Get a list of index values for Validation set from a dataset Arguments: n : int, Total number of elements in the data set. cv_idx : int, starting index [idx_start = cv_idx*int(val_pct*n)] val_pct : (int, float), validation set percentage seed : seed value for RandomState Returns: list of indexes """ np.random.seed(seed) n_val = int(val_pct*n) idx_start = cv_idx*n_val idxs = np.random.permutation(n) return idxs[idx_start:idx_start+n_val]
def get_cv_idxs(n, cv_idx=0, val_pct=0.2, seed=42): """ Get a list of index values for Validation set from a dataset Arguments: n : int, Total number of elements in the data set. cv_idx : int, starting index [idx_start = cv_idx*int(val_pct*n)] val_pct : (int, float), validation set percentage seed : seed value for RandomState Returns: list of indexes """ np.random.seed(seed) n_val = int(val_pct*n) idx_start = cv_idx*n_val idxs = np.random.permutation(n) return idxs[idx_start:idx_start+n_val]
[ "Get", "a", "list", "of", "index", "values", "for", "Validation", "set", "from", "a", "dataset", "Arguments", ":", "n", ":", "int", "Total", "number", "of", "elements", "in", "the", "data", "set", ".", "cv_idx", ":", "int", "starting", "index", "[", "idx_start", "=", "cv_idx", "*", "int", "(", "val_pct", "*", "n", ")", "]", "val_pct", ":", "(", "int", "float", ")", "validation", "set", "percentage", "seed", ":", "seed", "value", "for", "RandomState", "Returns", ":", "list", "of", "indexes" ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/dataset.py#L13-L29
[ "def", "get_cv_idxs", "(", "n", ",", "cv_idx", "=", "0", ",", "val_pct", "=", "0.2", ",", "seed", "=", "42", ")", ":", "np", ".", "random", ".", "seed", "(", "seed", ")", "n_val", "=", "int", "(", "val_pct", "*", "n", ")", "idx_start", "=", "cv_idx", "*", "n_val", "idxs", "=", "np", ".", "random", ".", "permutation", "(", "n", ")", "return", "idxs", "[", "idx_start", ":", "idx_start", "+", "n_val", "]" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
resize_img
Enlarge or shrink a single image to scale, such that the smaller of the height or width dimension is equal to targ.
old/fastai/dataset.py
def resize_img(fname, targ, path, new_path, fn=None): """ Enlarge or shrink a single image to scale, such that the smaller of the height or width dimension is equal to targ. """ if fn is None: fn = resize_fn(targ) dest = os.path.join(path_for(path, new_path, targ), fname) if os.path.exists(dest): return im = Image.open(os.path.join(path, fname)).convert('RGB') os.makedirs(os.path.split(dest)[0], exist_ok=True) fn(im).save(dest)
def resize_img(fname, targ, path, new_path, fn=None): """ Enlarge or shrink a single image to scale, such that the smaller of the height or width dimension is equal to targ. """ if fn is None: fn = resize_fn(targ) dest = os.path.join(path_for(path, new_path, targ), fname) if os.path.exists(dest): return im = Image.open(os.path.join(path, fname)).convert('RGB') os.makedirs(os.path.split(dest)[0], exist_ok=True) fn(im).save(dest)
[ "Enlarge", "or", "shrink", "a", "single", "image", "to", "scale", "such", "that", "the", "smaller", "of", "the", "height", "or", "width", "dimension", "is", "equal", "to", "targ", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/dataset.py#L34-L44
[ "def", "resize_img", "(", "fname", ",", "targ", ",", "path", ",", "new_path", ",", "fn", "=", "None", ")", ":", "if", "fn", "is", "None", ":", "fn", "=", "resize_fn", "(", "targ", ")", "dest", "=", "os", ".", "path", ".", "join", "(", "path_for", "(", "path", ",", "new_path", ",", "targ", ")", ",", "fname", ")", "if", "os", ".", "path", ".", "exists", "(", "dest", ")", ":", "return", "im", "=", "Image", ".", "open", "(", "os", ".", "path", ".", "join", "(", "path", ",", "fname", ")", ")", ".", "convert", "(", "'RGB'", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "split", "(", "dest", ")", "[", "0", "]", ",", "exist_ok", "=", "True", ")", "fn", "(", "im", ")", ".", "save", "(", "dest", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
train
resize_imgs
Enlarge or shrink a set of images in the same directory to scale, such that the smaller of the height or width dimension is equal to targ. Note: -- This function is multithreaded for efficiency. -- When destination file or folder already exist, function exists without raising an error.
old/fastai/dataset.py
def resize_imgs(fnames, targ, path, new_path, resume=True, fn=None): """ Enlarge or shrink a set of images in the same directory to scale, such that the smaller of the height or width dimension is equal to targ. Note: -- This function is multithreaded for efficiency. -- When destination file or folder already exist, function exists without raising an error. """ target_path = path_for(path, new_path, targ) if resume: subdirs = {os.path.dirname(p) for p in fnames} subdirs = {s for s in subdirs if os.path.exists(os.path.join(target_path, s))} already_resized_fnames = set() for subdir in subdirs: files = [os.path.join(subdir, file) for file in os.listdir(os.path.join(target_path, subdir))] already_resized_fnames.update(set(files)) original_fnames = set(fnames) fnames = list(original_fnames - already_resized_fnames) errors = {} def safely_process(fname): try: resize_img(fname, targ, path, new_path, fn=fn) except Exception as ex: errors[fname] = str(ex) if len(fnames) > 0: with ThreadPoolExecutor(num_cpus()) as e: ims = e.map(lambda fname: safely_process(fname), fnames) for _ in tqdm(ims, total=len(fnames), leave=False): pass if errors: print('Some images failed to process:') print(json.dumps(errors, indent=2)) return os.path.join(path,new_path,str(targ))
def resize_imgs(fnames, targ, path, new_path, resume=True, fn=None): """ Enlarge or shrink a set of images in the same directory to scale, such that the smaller of the height or width dimension is equal to targ. Note: -- This function is multithreaded for efficiency. -- When destination file or folder already exist, function exists without raising an error. """ target_path = path_for(path, new_path, targ) if resume: subdirs = {os.path.dirname(p) for p in fnames} subdirs = {s for s in subdirs if os.path.exists(os.path.join(target_path, s))} already_resized_fnames = set() for subdir in subdirs: files = [os.path.join(subdir, file) for file in os.listdir(os.path.join(target_path, subdir))] already_resized_fnames.update(set(files)) original_fnames = set(fnames) fnames = list(original_fnames - already_resized_fnames) errors = {} def safely_process(fname): try: resize_img(fname, targ, path, new_path, fn=fn) except Exception as ex: errors[fname] = str(ex) if len(fnames) > 0: with ThreadPoolExecutor(num_cpus()) as e: ims = e.map(lambda fname: safely_process(fname), fnames) for _ in tqdm(ims, total=len(fnames), leave=False): pass if errors: print('Some images failed to process:') print(json.dumps(errors, indent=2)) return os.path.join(path,new_path,str(targ))
[ "Enlarge", "or", "shrink", "a", "set", "of", "images", "in", "the", "same", "directory", "to", "scale", "such", "that", "the", "smaller", "of", "the", "height", "or", "width", "dimension", "is", "equal", "to", "targ", ".", "Note", ":", "--", "This", "function", "is", "multithreaded", "for", "efficiency", ".", "--", "When", "destination", "file", "or", "folder", "already", "exist", "function", "exists", "without", "raising", "an", "error", "." ]
fastai/fastai
python
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/dataset.py#L55-L87
[ "def", "resize_imgs", "(", "fnames", ",", "targ", ",", "path", ",", "new_path", ",", "resume", "=", "True", ",", "fn", "=", "None", ")", ":", "target_path", "=", "path_for", "(", "path", ",", "new_path", ",", "targ", ")", "if", "resume", ":", "subdirs", "=", "{", "os", ".", "path", ".", "dirname", "(", "p", ")", "for", "p", "in", "fnames", "}", "subdirs", "=", "{", "s", "for", "s", "in", "subdirs", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "target_path", ",", "s", ")", ")", "}", "already_resized_fnames", "=", "set", "(", ")", "for", "subdir", "in", "subdirs", ":", "files", "=", "[", "os", ".", "path", ".", "join", "(", "subdir", ",", "file", ")", "for", "file", "in", "os", ".", "listdir", "(", "os", ".", "path", ".", "join", "(", "target_path", ",", "subdir", ")", ")", "]", "already_resized_fnames", ".", "update", "(", "set", "(", "files", ")", ")", "original_fnames", "=", "set", "(", "fnames", ")", "fnames", "=", "list", "(", "original_fnames", "-", "already_resized_fnames", ")", "errors", "=", "{", "}", "def", "safely_process", "(", "fname", ")", ":", "try", ":", "resize_img", "(", "fname", ",", "targ", ",", "path", ",", "new_path", ",", "fn", "=", "fn", ")", "except", "Exception", "as", "ex", ":", "errors", "[", "fname", "]", "=", "str", "(", "ex", ")", "if", "len", "(", "fnames", ")", ">", "0", ":", "with", "ThreadPoolExecutor", "(", "num_cpus", "(", ")", ")", "as", "e", ":", "ims", "=", "e", ".", "map", "(", "lambda", "fname", ":", "safely_process", "(", "fname", ")", ",", "fnames", ")", "for", "_", "in", "tqdm", "(", "ims", ",", "total", "=", "len", "(", "fnames", ")", ",", "leave", "=", "False", ")", ":", "pass", "if", "errors", ":", "print", "(", "'Some images failed to process:'", ")", "print", "(", "json", ".", "dumps", "(", "errors", ",", "indent", "=", "2", ")", ")", "return", "os", ".", "path", ".", "join", "(", "path", ",", "new_path", ",", "str", "(", "targ", ")", ")" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67