column            type    range
id                int32   0 - 252k
repo              string  length 7 - 55
path              string  length 4 - 127
func_name         string  length 1 - 88
original_string   string  length 75 - 19.8k
language          string  1 class
code              string  length 75 - 19.8k
code_tokens       list
docstring         string  length 3 - 17.3k
docstring_tokens  list
sha               string  length 40 - 40
url               string  length 87 - 242
20,800
fastai/fastai
old/fastai/sgdr.py
LossRecorder.plot_loss
def plot_loss(self, n_skip=10, n_skip_end=5):
    '''
    plots loss function as function of iterations. When used in Jupyternotebook, plot will be displayed in notebook.
    Else, plot will be displayed in console and both plot and loss are saved in save_path.
    '''
    if not in_ipynb(): plt.switch_backend('agg')
    plt.plot(self.iterations[n_skip:-n_skip_end], self.losses[n_skip:-n_skip_end])
    if not in_ipynb():
        plt.savefig(os.path.join(self.save_path, 'loss_plot.png'))
        np.save(os.path.join(self.save_path, 'losses.npy'), self.losses[10:])
python
def plot_loss(self, n_skip=10, n_skip_end=5):
    '''
    plots loss function as function of iterations. When used in Jupyternotebook, plot will be displayed in notebook.
    Else, plot will be displayed in console and both plot and loss are saved in save_path.
    '''
    if not in_ipynb(): plt.switch_backend('agg')
    plt.plot(self.iterations[n_skip:-n_skip_end], self.losses[n_skip:-n_skip_end])
    if not in_ipynb():
        plt.savefig(os.path.join(self.save_path, 'loss_plot.png'))
        np.save(os.path.join(self.save_path, 'losses.npy'), self.losses[10:])
[ "def", "plot_loss", "(", "self", ",", "n_skip", "=", "10", ",", "n_skip_end", "=", "5", ")", ":", "if", "not", "in_ipynb", "(", ")", ":", "plt", ".", "switch_backend", "(", "'agg'", ")", "plt", ".", "plot", "(", "self", ".", "iterations", "[", "n_skip", ":", "-", "n_skip_end", "]", ",", "self", ".", "losses", "[", "n_skip", ":", "-", "n_skip_end", "]", ")", "if", "not", "in_ipynb", "(", ")", ":", "plt", ".", "savefig", "(", "os", ".", "path", ".", "join", "(", "self", ".", "save_path", ",", "'loss_plot.png'", ")", ")", "np", ".", "save", "(", "os", ".", "path", ".", "join", "(", "self", ".", "save_path", ",", "'losses.npy'", ")", ",", "self", ".", "losses", "[", "10", ":", "]", ")" ]
plots loss function as function of iterations. When used in Jupyternotebook, plot will be displayed in notebook. Else, plot will be displayed in console and both plot and loss are saved in save_path.
[ "plots", "loss", "function", "as", "function", "of", "iterations", ".", "When", "used", "in", "Jupyternotebook", "plot", "will", "be", "displayed", "in", "notebook", ".", "Else", "plot", "will", "be", "displayed", "in", "console", "and", "both", "plot", "and", "loss", "are", "saved", "in", "save_path", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/sgdr.py#L100-L109
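A minimal self-contained sketch of the slicing used by plot_loss above, on a made-up loss curve (illustration only, not a dataset row): the first n_skip and last n_skip_end iterations are dropped so warm-up and tail noise don't dominate the plot.

import matplotlib.pyplot as plt

iterations = list(range(100))
losses = [1.0 / (i + 1) for i in iterations]        # synthetic decaying loss
n_skip, n_skip_end = 10, 5                          # same defaults as plot_loss
plt.plot(iterations[n_skip:-n_skip_end], losses[n_skip:-n_skip_end])
plt.xlabel('iterations'); plt.ylabel('loss')
plt.savefig('loss_plot.png')                        # file output, as in the non-notebook branch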
20,801
fastai/fastai
old/fastai/sgdr.py
LossRecorder.plot_lr
def plot_lr(self):
    '''Plots learning rate in jupyter notebook or console, depending on the enviroment of the learner.'''
    if not in_ipynb(): plt.switch_backend('agg')
    if self.record_mom:
        fig, axs = plt.subplots(1,2,figsize=(12,4))
        for i in range(0,2): axs[i].set_xlabel('iterations')
        axs[0].set_ylabel('learning rate')
        axs[1].set_ylabel('momentum')
        axs[0].plot(self.iterations,self.lrs)
        axs[1].plot(self.iterations,self.momentums)
    else:
        plt.xlabel("iterations")
        plt.ylabel("learning rate")
        plt.plot(self.iterations, self.lrs)
    if not in_ipynb():
        plt.savefig(os.path.join(self.save_path, 'lr_plot.png'))
python
def plot_lr(self):
    '''Plots learning rate in jupyter notebook or console, depending on the enviroment of the learner.'''
    if not in_ipynb(): plt.switch_backend('agg')
    if self.record_mom:
        fig, axs = plt.subplots(1,2,figsize=(12,4))
        for i in range(0,2): axs[i].set_xlabel('iterations')
        axs[0].set_ylabel('learning rate')
        axs[1].set_ylabel('momentum')
        axs[0].plot(self.iterations,self.lrs)
        axs[1].plot(self.iterations,self.momentums)
    else:
        plt.xlabel("iterations")
        plt.ylabel("learning rate")
        plt.plot(self.iterations, self.lrs)
    if not in_ipynb():
        plt.savefig(os.path.join(self.save_path, 'lr_plot.png'))
[ "def", "plot_lr", "(", "self", ")", ":", "if", "not", "in_ipynb", "(", ")", ":", "plt", ".", "switch_backend", "(", "'agg'", ")", "if", "self", ".", "record_mom", ":", "fig", ",", "axs", "=", "plt", ".", "subplots", "(", "1", ",", "2", ",", "figsize", "=", "(", "12", ",", "4", ")", ")", "for", "i", "in", "range", "(", "0", ",", "2", ")", ":", "axs", "[", "i", "]", ".", "set_xlabel", "(", "'iterations'", ")", "axs", "[", "0", "]", ".", "set_ylabel", "(", "'learning rate'", ")", "axs", "[", "1", "]", ".", "set_ylabel", "(", "'momentum'", ")", "axs", "[", "0", "]", ".", "plot", "(", "self", ".", "iterations", ",", "self", ".", "lrs", ")", "axs", "[", "1", "]", ".", "plot", "(", "self", ".", "iterations", ",", "self", ".", "momentums", ")", "else", ":", "plt", ".", "xlabel", "(", "\"iterations\"", ")", "plt", ".", "ylabel", "(", "\"learning rate\"", ")", "plt", ".", "plot", "(", "self", ".", "iterations", ",", "self", ".", "lrs", ")", "if", "not", "in_ipynb", "(", ")", ":", "plt", ".", "savefig", "(", "os", ".", "path", ".", "join", "(", "self", ".", "save_path", ",", "'lr_plot.png'", ")", ")" ]
Plots learning rate in jupyter notebook or console, depending on the enviroment of the learner.
[ "Plots", "learning", "rate", "in", "jupyter", "notebook", "or", "console", "depending", "on", "the", "enviroment", "of", "the", "learner", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/sgdr.py#L111-L127
20,802
fastai/fastai
old/fastai/sgdr.py
LR_Finder.plot
def plot(self, n_skip=10, n_skip_end=5):
    '''
    Plots the loss function with respect to learning rate, in log scale.
    '''
    plt.ylabel("validation loss")
    plt.xlabel("learning rate (log scale)")
    plt.plot(self.lrs[n_skip:-(n_skip_end+1)], self.losses[n_skip:-(n_skip_end+1)])
    plt.xscale('log')
python
def plot(self, n_skip=10, n_skip_end=5):
    '''
    Plots the loss function with respect to learning rate, in log scale.
    '''
    plt.ylabel("validation loss")
    plt.xlabel("learning rate (log scale)")
    plt.plot(self.lrs[n_skip:-(n_skip_end+1)], self.losses[n_skip:-(n_skip_end+1)])
    plt.xscale('log')
[ "def", "plot", "(", "self", ",", "n_skip", "=", "10", ",", "n_skip_end", "=", "5", ")", ":", "plt", ".", "ylabel", "(", "\"validation loss\"", ")", "plt", ".", "xlabel", "(", "\"learning rate (log scale)\"", ")", "plt", ".", "plot", "(", "self", ".", "lrs", "[", "n_skip", ":", "-", "(", "n_skip_end", "+", "1", ")", "]", ",", "self", ".", "losses", "[", "n_skip", ":", "-", "(", "n_skip_end", "+", "1", ")", "]", ")", "plt", ".", "xscale", "(", "'log'", ")" ]
Plots the loss function with respect to learning rate, in log scale.
[ "Plots", "the", "loss", "function", "with", "respect", "to", "learning", "rate", "in", "log", "scale", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/sgdr.py#L190-L197
20,803
fastai/fastai
examples/train_imagenette.py
main
def main(
        gpu:Param("GPU to run on", str)=None,
        woof: Param("Use imagewoof (otherwise imagenette)", int)=0,
        lr: Param("Learning rate", float)=1e-3,
        size: Param("Size (px: 128,192,224)", int)=128,
        alpha: Param("Alpha", float)=0.99,
        mom: Param("Momentum", float)=0.9,
        eps: Param("epsilon", float)=1e-6,
        epochs: Param("Number of epochs", int)=5,
        bs: Param("Batch size", int)=256,
        mixup: Param("Mixup", float)=0.,
        opt: Param("Optimizer (adam,rms,sgd)", str)='adam',
        arch: Param("Architecture (xresnet34, xresnet50, presnet34, presnet50)", str)='xresnet50',
        dump: Param("Print model; don't train", int)=0,
        ):
    "Distributed training of Imagenette."
    gpu = setup_distrib(gpu)
    if gpu is None: bs *= torch.cuda.device_count()
    if   opt=='adam': opt_func = partial(optim.Adam, betas=(mom,alpha), eps=eps)
    elif opt=='rms' : opt_func = partial(optim.RMSprop, alpha=alpha, eps=eps)
    elif opt=='sgd' : opt_func = partial(optim.SGD, momentum=mom)
    data = get_data(size, woof, bs)
    bs_rat = bs/256
    if gpu is not None: bs_rat *= num_distrib()
    if not gpu: print(f'lr: {lr}; eff_lr: {lr*bs_rat}; size: {size}; alpha: {alpha}; mom: {mom}; eps: {eps}')
    lr *= bs_rat
    m = globals()[arch]
    learn = (Learner(data, m(c_out=10), wd=1e-2, opt_func=opt_func,
             metrics=[accuracy,top_k_accuracy],
             bn_wd=False, true_wd=True,
             loss_func = LabelSmoothingCrossEntropy())
            )
    if dump: print(learn.model); exit()
    if mixup: learn = learn.mixup(alpha=mixup)
    learn = learn.to_fp16(dynamic=True)
    if gpu is None:       learn.to_parallel()
    elif num_distrib()>1: learn.to_distributed(gpu) # Requires `-m fastai.launch`
    learn.fit_one_cycle(epochs, lr, div_factor=10, pct_start=0.3)
python
def main(
        gpu:Param("GPU to run on", str)=None,
        woof: Param("Use imagewoof (otherwise imagenette)", int)=0,
        lr: Param("Learning rate", float)=1e-3,
        size: Param("Size (px: 128,192,224)", int)=128,
        alpha: Param("Alpha", float)=0.99,
        mom: Param("Momentum", float)=0.9,
        eps: Param("epsilon", float)=1e-6,
        epochs: Param("Number of epochs", int)=5,
        bs: Param("Batch size", int)=256,
        mixup: Param("Mixup", float)=0.,
        opt: Param("Optimizer (adam,rms,sgd)", str)='adam',
        arch: Param("Architecture (xresnet34, xresnet50, presnet34, presnet50)", str)='xresnet50',
        dump: Param("Print model; don't train", int)=0,
        ):
    "Distributed training of Imagenette."
    gpu = setup_distrib(gpu)
    if gpu is None: bs *= torch.cuda.device_count()
    if   opt=='adam': opt_func = partial(optim.Adam, betas=(mom,alpha), eps=eps)
    elif opt=='rms' : opt_func = partial(optim.RMSprop, alpha=alpha, eps=eps)
    elif opt=='sgd' : opt_func = partial(optim.SGD, momentum=mom)
    data = get_data(size, woof, bs)
    bs_rat = bs/256
    if gpu is not None: bs_rat *= num_distrib()
    if not gpu: print(f'lr: {lr}; eff_lr: {lr*bs_rat}; size: {size}; alpha: {alpha}; mom: {mom}; eps: {eps}')
    lr *= bs_rat
    m = globals()[arch]
    learn = (Learner(data, m(c_out=10), wd=1e-2, opt_func=opt_func,
             metrics=[accuracy,top_k_accuracy],
             bn_wd=False, true_wd=True,
             loss_func = LabelSmoothingCrossEntropy())
            )
    if dump: print(learn.model); exit()
    if mixup: learn = learn.mixup(alpha=mixup)
    learn = learn.to_fp16(dynamic=True)
    if gpu is None:       learn.to_parallel()
    elif num_distrib()>1: learn.to_distributed(gpu) # Requires `-m fastai.launch`
    learn.fit_one_cycle(epochs, lr, div_factor=10, pct_start=0.3)
[ "def", "main", "(", "gpu", ":", "Param", "(", "\"GPU to run on\"", ",", "str", ")", "=", "None", ",", "woof", ":", "Param", "(", "\"Use imagewoof (otherwise imagenette)\"", ",", "int", ")", "=", "0", ",", "lr", ":", "Param", "(", "\"Learning rate\"", ",", "float", ")", "=", "1e-3", ",", "size", ":", "Param", "(", "\"Size (px: 128,192,224)\"", ",", "int", ")", "=", "128", ",", "alpha", ":", "Param", "(", "\"Alpha\"", ",", "float", ")", "=", "0.99", ",", "mom", ":", "Param", "(", "\"Momentum\"", ",", "float", ")", "=", "0.9", ",", "eps", ":", "Param", "(", "\"epsilon\"", ",", "float", ")", "=", "1e-6", ",", "epochs", ":", "Param", "(", "\"Number of epochs\"", ",", "int", ")", "=", "5", ",", "bs", ":", "Param", "(", "\"Batch size\"", ",", "int", ")", "=", "256", ",", "mixup", ":", "Param", "(", "\"Mixup\"", ",", "float", ")", "=", "0.", ",", "opt", ":", "Param", "(", "\"Optimizer (adam,rms,sgd)\"", ",", "str", ")", "=", "'adam'", ",", "arch", ":", "Param", "(", "\"Architecture (xresnet34, xresnet50, presnet34, presnet50)\"", ",", "str", ")", "=", "'xresnet50'", ",", "dump", ":", "Param", "(", "\"Print model; don't train\"", ",", "int", ")", "=", "0", ",", ")", ":", "gpu", "=", "setup_distrib", "(", "gpu", ")", "if", "gpu", "is", "None", ":", "bs", "*=", "torch", ".", "cuda", ".", "device_count", "(", ")", "if", "opt", "==", "'adam'", ":", "opt_func", "=", "partial", "(", "optim", ".", "Adam", ",", "betas", "=", "(", "mom", ",", "alpha", ")", ",", "eps", "=", "eps", ")", "elif", "opt", "==", "'rms'", ":", "opt_func", "=", "partial", "(", "optim", ".", "RMSprop", ",", "alpha", "=", "alpha", ",", "eps", "=", "eps", ")", "elif", "opt", "==", "'sgd'", ":", "opt_func", "=", "partial", "(", "optim", ".", "SGD", ",", "momentum", "=", "mom", ")", "data", "=", "get_data", "(", "size", ",", "woof", ",", "bs", ")", "bs_rat", "=", "bs", "/", "256", "if", "gpu", "is", "not", "None", ":", "bs_rat", "*=", "num_distrib", "(", ")", "if", "not", "gpu", ":", "print", "(", "f'lr: {lr}; eff_lr: {lr*bs_rat}; size: {size}; alpha: {alpha}; mom: {mom}; eps: {eps}'", ")", "lr", "*=", "bs_rat", "m", "=", "globals", "(", ")", "[", "arch", "]", "learn", "=", "(", "Learner", "(", "data", ",", "m", "(", "c_out", "=", "10", ")", ",", "wd", "=", "1e-2", ",", "opt_func", "=", "opt_func", ",", "metrics", "=", "[", "accuracy", ",", "top_k_accuracy", "]", ",", "bn_wd", "=", "False", ",", "true_wd", "=", "True", ",", "loss_func", "=", "LabelSmoothingCrossEntropy", "(", ")", ")", ")", "if", "dump", ":", "print", "(", "learn", ".", "model", ")", "exit", "(", ")", "if", "mixup", ":", "learn", "=", "learn", ".", "mixup", "(", "alpha", "=", "mixup", ")", "learn", "=", "learn", ".", "to_fp16", "(", "dynamic", "=", "True", ")", "if", "gpu", "is", "None", ":", "learn", ".", "to_parallel", "(", ")", "elif", "num_distrib", "(", ")", ">", "1", ":", "learn", ".", "to_distributed", "(", "gpu", ")", "# Requires `-m fastai.launch`", "learn", ".", "fit_one_cycle", "(", "epochs", ",", "lr", ",", "div_factor", "=", "10", ",", "pct_start", "=", "0.3", ")" ]
Distributed training of Imagenette.
[ "Distributed", "training", "of", "Imagenette", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/examples/train_imagenette.py#L30-L71
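The bs_rat block above implements the linear learning-rate scaling rule: the base lr is multiplied by batch_size/256 and, under distributed training, by the number of processes. A simplified standalone sketch with hypothetical numbers (not a dataset row):

def effective_lr(lr, bs, n_procs=1):
    bs_rat = bs / 256      # ratio to the reference batch size of 256
    bs_rat *= n_procs      # extra scaling per distributed process, as in main()
    return lr * bs_rat

print(effective_lr(1e-3, 256))       # 0.001: the reference setup
print(effective_lr(1e-3, 512, 2))    # 0.004: double batch on two workers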
20,804
fastai/fastai
fastai/callbacks/tracker.py
TerminateOnNaNCallback.on_batch_end
def on_batch_end(self, last_loss, epoch, num_batch, **kwargs:Any)->None:
    "Test if `last_loss` is NaN and interrupts training."
    if self.stop: return True #to skip validation after stopping during training
    if torch.isnan(last_loss):
        print (f'Epoch/Batch ({epoch}/{num_batch}): Invalid loss, terminating training.')
        return {'stop_epoch': True, 'stop_training': True, 'skip_validate': True}
python
def on_batch_end(self, last_loss, epoch, num_batch, **kwargs:Any)->None:
    "Test if `last_loss` is NaN and interrupts training."
    if self.stop: return True #to skip validation after stopping during training
    if torch.isnan(last_loss):
        print (f'Epoch/Batch ({epoch}/{num_batch}): Invalid loss, terminating training.')
        return {'stop_epoch': True, 'stop_training': True, 'skip_validate': True}
[ "def", "on_batch_end", "(", "self", ",", "last_loss", ",", "epoch", ",", "num_batch", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "self", ".", "stop", ":", "return", "True", "#to skip validation after stopping during training", "if", "torch", ".", "isnan", "(", "last_loss", ")", ":", "print", "(", "f'Epoch/Batch ({epoch}/{num_batch}): Invalid loss, terminating training.'", ")", "return", "{", "'stop_epoch'", ":", "True", ",", "'stop_training'", ":", "True", ",", "'skip_validate'", ":", "True", "}" ]
Test if `last_loss` is NaN and interrupts training.
[ "Test", "if", "last_loss", "is", "NaN", "and", "interrupts", "training", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tracker.py#L16-L21
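The callback above fires once torch.isnan detects a NaN in the scalar loss tensor, then returns a dict of stop flags. A minimal sketch of that check in isolation (toy tensors, not a dataset row):

import torch

for last_loss in [torch.tensor(0.7), torch.tensor(float('nan'))]:
    if torch.isnan(last_loss):              # 0-dim bool tensor, truthy for a NaN loss
        print('Invalid loss, terminating training.')
    else:
        print(f'loss ok: {last_loss.item()}')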
20,805
fastai/fastai
fastai/callbacks/tracker.py
TrackerCallback.on_train_begin
def on_train_begin(self, **kwargs:Any)->None:
    "Initializes the best value."
    self.best = float('inf') if self.operator == np.less else -float('inf')
python
def on_train_begin(self, **kwargs:Any)->None:
    "Initializes the best value."
    self.best = float('inf') if self.operator == np.less else -float('inf')
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "best", "=", "float", "(", "'inf'", ")", "if", "self", ".", "operator", "==", "np", ".", "less", "else", "-", "float", "(", "'inf'", ")" ]
Initializes the best value.
[ "Initializes", "the", "best", "value", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tracker.py#L35-L37
20,806
fastai/fastai
fastai/callbacks/tracker.py
TrackerCallback.get_monitor_value
def get_monitor_value(self):
    "Pick the monitored value."
    if self.monitor=='trn_loss' and len(self.learn.recorder.losses) == 0: return None
    elif len(self.learn.recorder.val_losses) == 0: return None
    values = {'train_loss':self.learn.recorder.losses[-1].cpu().numpy(),
              'valid_loss':self.learn.recorder.val_losses[-1]}
    if values['valid_loss'] is None: return
    if self.learn.recorder.metrics:
        for m, n in zip(self.learn.recorder.metrics[-1],self.learn.recorder.names[3:-1]):
            values[n] = m
    if values.get(self.monitor) is None:
        warn(f'{self.__class__} conditioned on metric `{self.monitor}` which is not available. Available metrics are: {", ".join(map(str, self.learn.recorder.names[1:-1]))}')
    return values.get(self.monitor)
python
def get_monitor_value(self):
    "Pick the monitored value."
    if self.monitor=='trn_loss' and len(self.learn.recorder.losses) == 0: return None
    elif len(self.learn.recorder.val_losses) == 0: return None
    values = {'train_loss':self.learn.recorder.losses[-1].cpu().numpy(),
              'valid_loss':self.learn.recorder.val_losses[-1]}
    if values['valid_loss'] is None: return
    if self.learn.recorder.metrics:
        for m, n in zip(self.learn.recorder.metrics[-1],self.learn.recorder.names[3:-1]):
            values[n] = m
    if values.get(self.monitor) is None:
        warn(f'{self.__class__} conditioned on metric `{self.monitor}` which is not available. Available metrics are: {", ".join(map(str, self.learn.recorder.names[1:-1]))}')
    return values.get(self.monitor)
[ "def", "get_monitor_value", "(", "self", ")", ":", "if", "self", ".", "monitor", "==", "'trn_loss'", "and", "len", "(", "self", ".", "learn", ".", "recorder", ".", "losses", ")", "==", "0", ":", "return", "None", "elif", "len", "(", "self", ".", "learn", ".", "recorder", ".", "val_losses", ")", "==", "0", ":", "return", "None", "values", "=", "{", "'train_loss'", ":", "self", ".", "learn", ".", "recorder", ".", "losses", "[", "-", "1", "]", ".", "cpu", "(", ")", ".", "numpy", "(", ")", ",", "'valid_loss'", ":", "self", ".", "learn", ".", "recorder", ".", "val_losses", "[", "-", "1", "]", "}", "if", "values", "[", "'valid_loss'", "]", "is", "None", ":", "return", "if", "self", ".", "learn", ".", "recorder", ".", "metrics", ":", "for", "m", ",", "n", "in", "zip", "(", "self", ".", "learn", ".", "recorder", ".", "metrics", "[", "-", "1", "]", ",", "self", ".", "learn", ".", "recorder", ".", "names", "[", "3", ":", "-", "1", "]", ")", ":", "values", "[", "n", "]", "=", "m", "if", "values", ".", "get", "(", "self", ".", "monitor", ")", "is", "None", ":", "warn", "(", "f'{self.__class__} conditioned on metric `{self.monitor}` which is not available. Available metrics are: {\", \".join(map(str, self.learn.recorder.names[1:-1]))}'", ")", "return", "values", ".", "get", "(", "self", ".", "monitor", ")" ]
Pick the monitored value.
[ "Pick", "the", "monitored", "value", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tracker.py#L39-L51
20,807
fastai/fastai
fastai/callbacks/tracker.py
SaveModelCallback.on_epoch_end
def on_epoch_end(self, epoch:int, **kwargs:Any)->None:
    "Compare the value monitored to its best score and maybe save the model."
    if self.every=="epoch": self.learn.save(f'{self.name}_{epoch}')
    else: #every="improvement"
        current = self.get_monitor_value()
        if current is not None and self.operator(current, self.best):
            print(f'Better model found at epoch {epoch} with {self.monitor} value: {current}.')
            self.best = current
            self.learn.save(f'{self.name}')
python
def on_epoch_end(self, epoch:int, **kwargs:Any)->None:
    "Compare the value monitored to its best score and maybe save the model."
    if self.every=="epoch": self.learn.save(f'{self.name}_{epoch}')
    else: #every="improvement"
        current = self.get_monitor_value()
        if current is not None and self.operator(current, self.best):
            print(f'Better model found at epoch {epoch} with {self.monitor} value: {current}.')
            self.best = current
            self.learn.save(f'{self.name}')
[ "def", "on_epoch_end", "(", "self", ",", "epoch", ":", "int", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "self", ".", "every", "==", "\"epoch\"", ":", "self", ".", "learn", ".", "save", "(", "f'{self.name}_{epoch}'", ")", "else", ":", "#every=\"improvement\"", "current", "=", "self", ".", "get_monitor_value", "(", ")", "if", "current", "is", "not", "None", "and", "self", ".", "operator", "(", "current", ",", "self", ".", "best", ")", ":", "print", "(", "f'Better model found at epoch {epoch} with {self.monitor} value: {current}.'", ")", "self", ".", "best", "=", "current", "self", ".", "learn", ".", "save", "(", "f'{self.name}'", ")" ]
Compare the value monitored to its best score and maybe save the model.
[ "Compare", "the", "value", "monitored", "to", "its", "best", "score", "and", "maybe", "save", "the", "model", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tracker.py#L93-L101
20,808
fastai/fastai
fastai/callbacks/tracker.py
ReduceLROnPlateauCallback.on_train_begin
def on_train_begin(self, **kwargs:Any)->None:
    "Initialize inner arguments."
    self.wait, self.opt = 0, self.learn.opt
    super().on_train_begin(**kwargs)
python
def on_train_begin(self, **kwargs:Any)->None:
    "Initialize inner arguments."
    self.wait, self.opt = 0, self.learn.opt
    super().on_train_begin(**kwargs)
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "wait", ",", "self", ".", "opt", "=", "0", ",", "self", ".", "learn", ".", "opt", "super", "(", ")", ".", "on_train_begin", "(", "*", "*", "kwargs", ")" ]
Initialize inner arguments.
[ "Initialize", "inner", "arguments", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tracker.py#L116-L119
20,809
fastai/fastai
fastai/callbacks/tracker.py
ReduceLROnPlateauCallback.on_epoch_end
def on_epoch_end(self, epoch, **kwargs:Any)->None:
    "Compare the value monitored to its best and maybe reduce lr."
    current = self.get_monitor_value()
    if current is None: return
    if self.operator(current - self.min_delta, self.best): self.best,self.wait = current,0
    else:
        self.wait += 1
        if self.wait > self.patience:
            self.opt.lr *= self.factor
            self.wait = 0
            print(f'Epoch {epoch}: reducing lr to {self.opt.lr}')
python
def on_epoch_end(self, epoch, **kwargs:Any)->None:
    "Compare the value monitored to its best and maybe reduce lr."
    current = self.get_monitor_value()
    if current is None: return
    if self.operator(current - self.min_delta, self.best): self.best,self.wait = current,0
    else:
        self.wait += 1
        if self.wait > self.patience:
            self.opt.lr *= self.factor
            self.wait = 0
            print(f'Epoch {epoch}: reducing lr to {self.opt.lr}')
[ "def", "on_epoch_end", "(", "self", ",", "epoch", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "current", "=", "self", ".", "get_monitor_value", "(", ")", "if", "current", "is", "None", ":", "return", "if", "self", ".", "operator", "(", "current", "-", "self", ".", "min_delta", ",", "self", ".", "best", ")", ":", "self", ".", "best", ",", "self", ".", "wait", "=", "current", ",", "0", "else", ":", "self", ".", "wait", "+=", "1", "if", "self", ".", "wait", ">", "self", ".", "patience", ":", "self", ".", "opt", ".", "lr", "*=", "self", ".", "factor", "self", ".", "wait", "=", "0", "print", "(", "f'Epoch {epoch}: reducing lr to {self.opt.lr}'", ")" ]
Compare the value monitored to its best and maybe reduce lr.
[ "Compare", "the", "value", "monitored", "to", "its", "best", "and", "maybe", "reduce", "lr", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tracker.py#L121-L131
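A standalone replay of the patience/factor bookkeeping above, run on a made-up loss curve; it assumes the default "lower is better" comparison (operator is np.less) and min_delta of 0 (illustration only, not a dataset row):

lr, best, wait = 0.1, float('inf'), 0
patience, factor, min_delta = 2, 0.5, 0.0
for epoch, current in enumerate([1.0, 0.8, 0.8, 0.8, 0.8]):
    if current - min_delta < best:     # stands in for self.operator(current - min_delta, best)
        best, wait = current, 0
    else:
        wait += 1
        if wait > patience:
            lr *= factor
            wait = 0
            print(f'Epoch {epoch}: reducing lr to {lr}')   # fires at epoch 4 -> lr 0.05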
20,810
fastai/fastai
fastai/gen_doc/convert2html.py
convert_nb
def convert_nb(fname, dest_path='.'):
    "Convert a notebook `fname` to html file in `dest_path`."
    from .gen_notebooks import remove_undoc_cells, remove_code_cell_jupyter_widget_state_elem
    nb = read_nb(fname)
    nb['cells'] = remove_undoc_cells(nb['cells'])
    nb['cells'] = remove_code_cell_jupyter_widget_state_elem(nb['cells'])
    fname = Path(fname).absolute()
    dest_name = fname.with_suffix('.html').name
    meta = nb['metadata']
    meta_jekyll = meta['jekyll'] if 'jekyll' in meta else {'title': fname.with_suffix('').name}
    meta_jekyll['nb_path'] = f'{fname.parent.name}/{fname.name}'
    with open(f'{dest_path}/{dest_name}','w') as f:
        f.write(exporter.from_notebook_node(nb, resources=meta_jekyll)[0])
python
def convert_nb(fname, dest_path='.'):
    "Convert a notebook `fname` to html file in `dest_path`."
    from .gen_notebooks import remove_undoc_cells, remove_code_cell_jupyter_widget_state_elem
    nb = read_nb(fname)
    nb['cells'] = remove_undoc_cells(nb['cells'])
    nb['cells'] = remove_code_cell_jupyter_widget_state_elem(nb['cells'])
    fname = Path(fname).absolute()
    dest_name = fname.with_suffix('.html').name
    meta = nb['metadata']
    meta_jekyll = meta['jekyll'] if 'jekyll' in meta else {'title': fname.with_suffix('').name}
    meta_jekyll['nb_path'] = f'{fname.parent.name}/{fname.name}'
    with open(f'{dest_path}/{dest_name}','w') as f:
        f.write(exporter.from_notebook_node(nb, resources=meta_jekyll)[0])
[ "def", "convert_nb", "(", "fname", ",", "dest_path", "=", "'.'", ")", ":", "from", ".", "gen_notebooks", "import", "remove_undoc_cells", ",", "remove_code_cell_jupyter_widget_state_elem", "nb", "=", "read_nb", "(", "fname", ")", "nb", "[", "'cells'", "]", "=", "remove_undoc_cells", "(", "nb", "[", "'cells'", "]", ")", "nb", "[", "'cells'", "]", "=", "remove_code_cell_jupyter_widget_state_elem", "(", "nb", "[", "'cells'", "]", ")", "fname", "=", "Path", "(", "fname", ")", ".", "absolute", "(", ")", "dest_name", "=", "fname", ".", "with_suffix", "(", "'.html'", ")", ".", "name", "meta", "=", "nb", "[", "'metadata'", "]", "meta_jekyll", "=", "meta", "[", "'jekyll'", "]", "if", "'jekyll'", "in", "meta", "else", "{", "'title'", ":", "fname", ".", "with_suffix", "(", "''", ")", ".", "name", "}", "meta_jekyll", "[", "'nb_path'", "]", "=", "f'{fname.parent.name}/{fname.name}'", "with", "open", "(", "f'{dest_path}/{dest_name}'", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "exporter", ".", "from_notebook_node", "(", "nb", ",", "resources", "=", "meta_jekyll", ")", "[", "0", "]", ")" ]
Convert a notebook `fname` to html file in `dest_path`.
[ "Convert", "a", "notebook", "fname", "to", "html", "file", "in", "dest_path", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/gen_doc/convert2html.py#L21-L33
20,811
fastai/fastai
fastai/gen_doc/convert2html.py
convert_all
def convert_all(folder, dest_path='.', force_all=False):
    "Convert modified notebooks in `folder` to html pages in `dest_path`."
    path = Path(folder)
    changed_cnt = 0
    for fname in path.glob("*.ipynb"):
        # only rebuild modified files
        fname_out = Path(dest_path)/fname.with_suffix('.html').name
        if not force_all and fname_out.exists():
            in_mod  = os.path.getmtime(fname)
            out_mod = os.path.getmtime(fname_out)
            if in_mod < out_mod: continue
        print(f"converting: {fname} => {fname_out}")
        changed_cnt += 1
        convert_nb(fname, dest_path=dest_path)
    if not changed_cnt: print("No notebooks were modified")
python
def convert_all(folder, dest_path='.', force_all=False):
    "Convert modified notebooks in `folder` to html pages in `dest_path`."
    path = Path(folder)
    changed_cnt = 0
    for fname in path.glob("*.ipynb"):
        # only rebuild modified files
        fname_out = Path(dest_path)/fname.with_suffix('.html').name
        if not force_all and fname_out.exists():
            in_mod  = os.path.getmtime(fname)
            out_mod = os.path.getmtime(fname_out)
            if in_mod < out_mod: continue
        print(f"converting: {fname} => {fname_out}")
        changed_cnt += 1
        convert_nb(fname, dest_path=dest_path)
    if not changed_cnt: print("No notebooks were modified")
[ "def", "convert_all", "(", "folder", ",", "dest_path", "=", "'.'", ",", "force_all", "=", "False", ")", ":", "path", "=", "Path", "(", "folder", ")", "changed_cnt", "=", "0", "for", "fname", "in", "path", ".", "glob", "(", "\"*.ipynb\"", ")", ":", "# only rebuild modified files", "fname_out", "=", "Path", "(", "dest_path", ")", "/", "fname", ".", "with_suffix", "(", "'.html'", ")", ".", "name", "if", "not", "force_all", "and", "fname_out", ".", "exists", "(", ")", ":", "in_mod", "=", "os", ".", "path", ".", "getmtime", "(", "fname", ")", "out_mod", "=", "os", ".", "path", ".", "getmtime", "(", "fname_out", ")", "if", "in_mod", "<", "out_mod", ":", "continue", "print", "(", "f\"converting: {fname} => {fname_out}\"", ")", "changed_cnt", "+=", "1", "convert_nb", "(", "fname", ",", "dest_path", "=", "dest_path", ")", "if", "not", "changed_cnt", ":", "print", "(", "\"No notebooks were modified\"", ")" ]
Convert modified notebooks in `folder` to html pages in `dest_path`.
[ "Convert", "modified", "notebooks", "in", "folder", "to", "html", "pages", "in", "dest_path", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/gen_doc/convert2html.py#L35-L51
20,812
fastai/fastai
fastai/text/data.py
pad_collate
def pad_collate(samples:BatchSamples, pad_idx:int=1, pad_first:bool=True, backwards:bool=False) -> Tuple[LongTensor, LongTensor]:
    "Function that collect samples and adds padding. Flips token order if needed"
    samples = to_data(samples)
    max_len = max([len(s[0]) for s in samples])
    res = torch.zeros(len(samples), max_len).long() + pad_idx
    if backwards: pad_first = not pad_first
    for i,s in enumerate(samples):
        if pad_first: res[i,-len(s[0]):] = LongTensor(s[0])
        else:         res[i,:len(s[0]):] = LongTensor(s[0])
    if backwards: res = res.flip(1)
    return res, tensor(np.array([s[1] for s in samples]))
python
def pad_collate(samples:BatchSamples, pad_idx:int=1, pad_first:bool=True, backwards:bool=False) -> Tuple[LongTensor, LongTensor]:
    "Function that collect samples and adds padding. Flips token order if needed"
    samples = to_data(samples)
    max_len = max([len(s[0]) for s in samples])
    res = torch.zeros(len(samples), max_len).long() + pad_idx
    if backwards: pad_first = not pad_first
    for i,s in enumerate(samples):
        if pad_first: res[i,-len(s[0]):] = LongTensor(s[0])
        else:         res[i,:len(s[0]):] = LongTensor(s[0])
    if backwards: res = res.flip(1)
    return res, tensor(np.array([s[1] for s in samples]))
[ "def", "pad_collate", "(", "samples", ":", "BatchSamples", ",", "pad_idx", ":", "int", "=", "1", ",", "pad_first", ":", "bool", "=", "True", ",", "backwards", ":", "bool", "=", "False", ")", "->", "Tuple", "[", "LongTensor", ",", "LongTensor", "]", ":", "samples", "=", "to_data", "(", "samples", ")", "max_len", "=", "max", "(", "[", "len", "(", "s", "[", "0", "]", ")", "for", "s", "in", "samples", "]", ")", "res", "=", "torch", ".", "zeros", "(", "len", "(", "samples", ")", ",", "max_len", ")", ".", "long", "(", ")", "+", "pad_idx", "if", "backwards", ":", "pad_first", "=", "not", "pad_first", "for", "i", ",", "s", "in", "enumerate", "(", "samples", ")", ":", "if", "pad_first", ":", "res", "[", "i", ",", "-", "len", "(", "s", "[", "0", "]", ")", ":", "]", "=", "LongTensor", "(", "s", "[", "0", "]", ")", "else", ":", "res", "[", "i", ",", ":", "len", "(", "s", "[", "0", "]", ")", ":", "]", "=", "LongTensor", "(", "s", "[", "0", "]", ")", "if", "backwards", ":", "res", "=", "res", ".", "flip", "(", "1", ")", "return", "res", ",", "tensor", "(", "np", ".", "array", "(", "[", "s", "[", "1", "]", "for", "s", "in", "samples", "]", ")", ")" ]
Function that collect samples and adds padding. Flips token order if needed
[ "Function", "that", "collect", "samples", "and", "adds", "padding", ".", "Flips", "token", "order", "if", "needed" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L128-L138
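A self-contained demo of the left-padding behaviour of pad_collate above, with toy token ids and the defaults pad_idx=1, pad_first=True, backwards=False (illustration only, not a dataset row):

import torch

samples = [([5, 6, 7, 8], 0), ([5, 6], 1)]           # (token_ids, label) pairs
max_len = max(len(s[0]) for s in samples)
res = torch.zeros(len(samples), max_len).long() + 1   # pre-fill with pad_idx
for i, (toks, _) in enumerate(samples):
    res[i, -len(toks):] = torch.LongTensor(toks)      # pad_first=True: pad on the left
print(res)   # tensor([[5, 6, 7, 8],
             #         [1, 1, 5, 6]])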
20,813
fastai/fastai
fastai/text/data.py
open_text
def open_text(fn:PathOrStr, enc='utf-8'):
    "Read the text in `fn`."
    with open(fn,'r', encoding = enc) as f: return ''.join(f.readlines())
python
def open_text(fn:PathOrStr, enc='utf-8'):
    "Read the text in `fn`."
    with open(fn,'r', encoding = enc) as f: return ''.join(f.readlines())
[ "def", "open_text", "(", "fn", ":", "PathOrStr", ",", "enc", "=", "'utf-8'", ")", ":", "with", "open", "(", "fn", ",", "'r'", ",", "encoding", "=", "enc", ")", "as", "f", ":", "return", "''", ".", "join", "(", "f", ".", "readlines", "(", ")", ")" ]
Read the text in `fn`.
[ "Read", "the", "text", "in", "fn", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L272-L274
20,814
fastai/fastai
fastai/text/data.py
LanguageModelPreLoader.allocate_buffers
def allocate_buffers(self):
    "Create the ragged array that will be filled when we ask for items."
    if self.ite_len is None: len(self)
    self.idx   = LanguageModelPreLoader.CircularIndex(len(self.dataset.x.items), not self.backwards)
    self.batch = np.zeros((self.bs, self.bptt+1), dtype=np.int64)
    self.batch_x, self.batch_y = self.batch[:,0:self.bptt], self.batch[:,1:self.bptt+1]
    #ro: index of the text we're at inside our datasets for the various batches
    self.ro    = np.zeros(self.bs, dtype=np.int64)
    #ri: index of the token we're at inside our current text for the various batches
    self.ri    = np.zeros(self.bs, dtype=np.int)
python
def allocate_buffers(self):
    "Create the ragged array that will be filled when we ask for items."
    if self.ite_len is None: len(self)
    self.idx   = LanguageModelPreLoader.CircularIndex(len(self.dataset.x.items), not self.backwards)
    self.batch = np.zeros((self.bs, self.bptt+1), dtype=np.int64)
    self.batch_x, self.batch_y = self.batch[:,0:self.bptt], self.batch[:,1:self.bptt+1]
    #ro: index of the text we're at inside our datasets for the various batches
    self.ro    = np.zeros(self.bs, dtype=np.int64)
    #ri: index of the token we're at inside our current text for the various batches
    self.ri    = np.zeros(self.bs, dtype=np.int)
[ "def", "allocate_buffers", "(", "self", ")", ":", "if", "self", ".", "ite_len", "is", "None", ":", "len", "(", "self", ")", "self", ".", "idx", "=", "LanguageModelPreLoader", ".", "CircularIndex", "(", "len", "(", "self", ".", "dataset", ".", "x", ".", "items", ")", ",", "not", "self", ".", "backwards", ")", "self", ".", "batch", "=", "np", ".", "zeros", "(", "(", "self", ".", "bs", ",", "self", ".", "bptt", "+", "1", ")", ",", "dtype", "=", "np", ".", "int64", ")", "self", ".", "batch_x", ",", "self", ".", "batch_y", "=", "self", ".", "batch", "[", ":", ",", "0", ":", "self", ".", "bptt", "]", ",", "self", ".", "batch", "[", ":", ",", "1", ":", "self", ".", "bptt", "+", "1", "]", "#ro: index of the text we're at inside our datasets for the various batches", "self", ".", "ro", "=", "np", ".", "zeros", "(", "self", ".", "bs", ",", "dtype", "=", "np", ".", "int64", ")", "#ri: index of the token we're at inside our current text for the various batches", "self", ".", "ri", "=", "np", ".", "zeros", "(", "self", ".", "bs", ",", "dtype", "=", "np", ".", "int", ")" ]
Create the ragged array that will be filled when we ask for items.
[ "Create", "the", "ragged", "array", "that", "will", "be", "filled", "when", "we", "ask", "for", "items", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L42-L51
20,815
fastai/fastai
fastai/text/data.py
TextDataBunch.from_ids
def from_ids(cls, path:PathOrStr, vocab:Vocab, train_ids:Collection[Collection[int]], valid_ids:Collection[Collection[int]],
             test_ids:Collection[Collection[int]]=None, train_lbls:Collection[Union[int,float]]=None,
             valid_lbls:Collection[Union[int,float]]=None, classes:Collection[Any]=None,
             processor:PreProcessor=None, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from ids, labels and a `vocab`. `kwargs` are passed to the dataloader creation."
    src = ItemLists(path, TextList(train_ids, vocab, path=path, processor=[]),
                    TextList(valid_ids, vocab, path=path, processor=[]))
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_lists(train_lbls, valid_lbls, classes=classes, processor=[])
    if not is1d(train_lbls): src.train.y.one_hot,src.valid.y.one_hot = True,True
    if test_ids is not None: src.add_test(TextList(test_ids, vocab, path=path), label=train_lbls[0])
    src.valid.x.processor = ifnone(processor, [TokenizeProcessor(), NumericalizeProcessor(vocab=vocab)])
    return src.databunch(**kwargs)
python
def from_ids(cls, path:PathOrStr, vocab:Vocab, train_ids:Collection[Collection[int]], valid_ids:Collection[Collection[int]],
             test_ids:Collection[Collection[int]]=None, train_lbls:Collection[Union[int,float]]=None,
             valid_lbls:Collection[Union[int,float]]=None, classes:Collection[Any]=None,
             processor:PreProcessor=None, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from ids, labels and a `vocab`. `kwargs` are passed to the dataloader creation."
    src = ItemLists(path, TextList(train_ids, vocab, path=path, processor=[]),
                    TextList(valid_ids, vocab, path=path, processor=[]))
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_lists(train_lbls, valid_lbls, classes=classes, processor=[])
    if not is1d(train_lbls): src.train.y.one_hot,src.valid.y.one_hot = True,True
    if test_ids is not None: src.add_test(TextList(test_ids, vocab, path=path), label=train_lbls[0])
    src.valid.x.processor = ifnone(processor, [TokenizeProcessor(), NumericalizeProcessor(vocab=vocab)])
    return src.databunch(**kwargs)
[ "def", "from_ids", "(", "cls", ",", "path", ":", "PathOrStr", ",", "vocab", ":", "Vocab", ",", "train_ids", ":", "Collection", "[", "Collection", "[", "int", "]", "]", ",", "valid_ids", ":", "Collection", "[", "Collection", "[", "int", "]", "]", ",", "test_ids", ":", "Collection", "[", "Collection", "[", "int", "]", "]", "=", "None", ",", "train_lbls", ":", "Collection", "[", "Union", "[", "int", ",", "float", "]", "]", "=", "None", ",", "valid_lbls", ":", "Collection", "[", "Union", "[", "int", ",", "float", "]", "]", "=", "None", ",", "classes", ":", "Collection", "[", "Any", "]", "=", "None", ",", "processor", ":", "PreProcessor", "=", "None", ",", "*", "*", "kwargs", ")", "->", "DataBunch", ":", "src", "=", "ItemLists", "(", "path", ",", "TextList", "(", "train_ids", ",", "vocab", ",", "path", "=", "path", ",", "processor", "=", "[", "]", ")", ",", "TextList", "(", "valid_ids", ",", "vocab", ",", "path", "=", "path", ",", "processor", "=", "[", "]", ")", ")", "src", "=", "src", ".", "label_for_lm", "(", ")", "if", "cls", "==", "TextLMDataBunch", "else", "src", ".", "label_from_lists", "(", "train_lbls", ",", "valid_lbls", ",", "classes", "=", "classes", ",", "processor", "=", "[", "]", ")", "if", "not", "is1d", "(", "train_lbls", ")", ":", "src", ".", "train", ".", "y", ".", "one_hot", ",", "src", ".", "valid", ".", "y", ".", "one_hot", "=", "True", ",", "True", "if", "test_ids", "is", "not", "None", ":", "src", ".", "add_test", "(", "TextList", "(", "test_ids", ",", "vocab", ",", "path", "=", "path", ")", ",", "label", "=", "train_lbls", "[", "0", "]", ")", "src", ".", "valid", ".", "x", ".", "processor", "=", "ifnone", "(", "processor", ",", "[", "TokenizeProcessor", "(", ")", ",", "NumericalizeProcessor", "(", "vocab", "=", "vocab", ")", "]", ")", "return", "src", ".", "databunch", "(", "*", "*", "kwargs", ")" ]
Create a `TextDataBunch` from ids, labels and a `vocab`. `kwargs` are passed to the dataloader creation.
[ "Create", "a", "TextDataBunch", "from", "ids", "labels", "and", "a", "vocab", ".", "kwargs", "are", "passed", "to", "the", "dataloader", "creation", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L150-L161
20,816
fastai/fastai
fastai/text/data.py
TextDataBunch.from_tokens
def from_tokens(cls, path:PathOrStr, trn_tok:Collection[Collection[str]], trn_lbls:Collection[Union[int,float]],
                val_tok:Collection[Collection[str]], val_lbls:Collection[Union[int,float]], vocab:Vocab=None,
                tst_tok:Collection[Collection[str]]=None, classes:Collection[Any]=None, max_vocab:int=60000, min_freq:int=3,
                **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from tokens and labels. `kwargs` are passed to the dataloader creation."
    processor = NumericalizeProcessor(vocab=vocab, max_vocab=max_vocab, min_freq=min_freq)
    src = ItemLists(path, TextList(trn_tok, path=path, processor=processor),
                    TextList(val_tok, path=path, processor=processor))
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_lists(trn_lbls, val_lbls, classes=classes)
    if tst_tok is not None: src.add_test(TextList(tst_tok, path=path))
    return src.databunch(**kwargs)
python
def from_tokens(cls, path:PathOrStr, trn_tok:Collection[Collection[str]], trn_lbls:Collection[Union[int,float]],
                val_tok:Collection[Collection[str]], val_lbls:Collection[Union[int,float]], vocab:Vocab=None,
                tst_tok:Collection[Collection[str]]=None, classes:Collection[Any]=None, max_vocab:int=60000, min_freq:int=3,
                **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from tokens and labels. `kwargs` are passed to the dataloader creation."
    processor = NumericalizeProcessor(vocab=vocab, max_vocab=max_vocab, min_freq=min_freq)
    src = ItemLists(path, TextList(trn_tok, path=path, processor=processor),
                    TextList(val_tok, path=path, processor=processor))
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_lists(trn_lbls, val_lbls, classes=classes)
    if tst_tok is not None: src.add_test(TextList(tst_tok, path=path))
    return src.databunch(**kwargs)
[ "def", "from_tokens", "(", "cls", ",", "path", ":", "PathOrStr", ",", "trn_tok", ":", "Collection", "[", "Collection", "[", "str", "]", "]", ",", "trn_lbls", ":", "Collection", "[", "Union", "[", "int", ",", "float", "]", "]", ",", "val_tok", ":", "Collection", "[", "Collection", "[", "str", "]", "]", ",", "val_lbls", ":", "Collection", "[", "Union", "[", "int", ",", "float", "]", "]", ",", "vocab", ":", "Vocab", "=", "None", ",", "tst_tok", ":", "Collection", "[", "Collection", "[", "str", "]", "]", "=", "None", ",", "classes", ":", "Collection", "[", "Any", "]", "=", "None", ",", "max_vocab", ":", "int", "=", "60000", ",", "min_freq", ":", "int", "=", "3", ",", "*", "*", "kwargs", ")", "->", "DataBunch", ":", "processor", "=", "NumericalizeProcessor", "(", "vocab", "=", "vocab", ",", "max_vocab", "=", "max_vocab", ",", "min_freq", "=", "min_freq", ")", "src", "=", "ItemLists", "(", "path", ",", "TextList", "(", "trn_tok", ",", "path", "=", "path", ",", "processor", "=", "processor", ")", ",", "TextList", "(", "val_tok", ",", "path", "=", "path", ",", "processor", "=", "processor", ")", ")", "src", "=", "src", ".", "label_for_lm", "(", ")", "if", "cls", "==", "TextLMDataBunch", "else", "src", ".", "label_from_lists", "(", "trn_lbls", ",", "val_lbls", ",", "classes", "=", "classes", ")", "if", "tst_tok", "is", "not", "None", ":", "src", ".", "add_test", "(", "TextList", "(", "tst_tok", ",", "path", "=", "path", ")", ")", "return", "src", ".", "databunch", "(", "*", "*", "kwargs", ")" ]
Create a `TextDataBunch` from tokens and labels. `kwargs` are passed to the dataloader creation.
[ "Create", "a", "TextDataBunch", "from", "tokens", "and", "labels", ".", "kwargs", "are", "passed", "to", "the", "dataloader", "creation", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L177-L187
20,817
fastai/fastai
fastai/text/data.py
TextDataBunch.from_df
def from_df(cls, path:PathOrStr, train_df:DataFrame, valid_df:DataFrame, test_df:Optional[DataFrame]=None,
            tokenizer:Tokenizer=None, vocab:Vocab=None, classes:Collection[str]=None, text_cols:IntsOrStrs=1,
            label_cols:IntsOrStrs=0, label_delim:str=None, chunksize:int=10000, max_vocab:int=60000,
            min_freq:int=2, mark_fields:bool=False, include_bos:bool=True, include_eos:bool=False, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from DataFrames. `kwargs` are passed to the dataloader creation."
    processor = _get_processor(tokenizer=tokenizer, vocab=vocab, chunksize=chunksize, max_vocab=max_vocab,
                               min_freq=min_freq, mark_fields=mark_fields,
                               include_bos=include_bos, include_eos=include_eos)
    if classes is None and is_listy(label_cols) and len(label_cols) > 1: classes = label_cols
    src = ItemLists(path, TextList.from_df(train_df, path, cols=text_cols, processor=processor),
                    TextList.from_df(valid_df, path, cols=text_cols, processor=processor))
    if cls==TextLMDataBunch: src = src.label_for_lm()
    else:
        if label_delim is not None: src = src.label_from_df(cols=label_cols, classes=classes, label_delim=label_delim)
        else: src = src.label_from_df(cols=label_cols, classes=classes)
    if test_df is not None: src.add_test(TextList.from_df(test_df, path, cols=text_cols))
    return src.databunch(**kwargs)
python
def from_df(cls, path:PathOrStr, train_df:DataFrame, valid_df:DataFrame, test_df:Optional[DataFrame]=None,
            tokenizer:Tokenizer=None, vocab:Vocab=None, classes:Collection[str]=None, text_cols:IntsOrStrs=1,
            label_cols:IntsOrStrs=0, label_delim:str=None, chunksize:int=10000, max_vocab:int=60000,
            min_freq:int=2, mark_fields:bool=False, include_bos:bool=True, include_eos:bool=False, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from DataFrames. `kwargs` are passed to the dataloader creation."
    processor = _get_processor(tokenizer=tokenizer, vocab=vocab, chunksize=chunksize, max_vocab=max_vocab,
                               min_freq=min_freq, mark_fields=mark_fields,
                               include_bos=include_bos, include_eos=include_eos)
    if classes is None and is_listy(label_cols) and len(label_cols) > 1: classes = label_cols
    src = ItemLists(path, TextList.from_df(train_df, path, cols=text_cols, processor=processor),
                    TextList.from_df(valid_df, path, cols=text_cols, processor=processor))
    if cls==TextLMDataBunch: src = src.label_for_lm()
    else:
        if label_delim is not None: src = src.label_from_df(cols=label_cols, classes=classes, label_delim=label_delim)
        else: src = src.label_from_df(cols=label_cols, classes=classes)
    if test_df is not None: src.add_test(TextList.from_df(test_df, path, cols=text_cols))
    return src.databunch(**kwargs)
[ "def", "from_df", "(", "cls", ",", "path", ":", "PathOrStr", ",", "train_df", ":", "DataFrame", ",", "valid_df", ":", "DataFrame", ",", "test_df", ":", "Optional", "[", "DataFrame", "]", "=", "None", ",", "tokenizer", ":", "Tokenizer", "=", "None", ",", "vocab", ":", "Vocab", "=", "None", ",", "classes", ":", "Collection", "[", "str", "]", "=", "None", ",", "text_cols", ":", "IntsOrStrs", "=", "1", ",", "label_cols", ":", "IntsOrStrs", "=", "0", ",", "label_delim", ":", "str", "=", "None", ",", "chunksize", ":", "int", "=", "10000", ",", "max_vocab", ":", "int", "=", "60000", ",", "min_freq", ":", "int", "=", "2", ",", "mark_fields", ":", "bool", "=", "False", ",", "include_bos", ":", "bool", "=", "True", ",", "include_eos", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ")", "->", "DataBunch", ":", "processor", "=", "_get_processor", "(", "tokenizer", "=", "tokenizer", ",", "vocab", "=", "vocab", ",", "chunksize", "=", "chunksize", ",", "max_vocab", "=", "max_vocab", ",", "min_freq", "=", "min_freq", ",", "mark_fields", "=", "mark_fields", ",", "include_bos", "=", "include_bos", ",", "include_eos", "=", "include_eos", ")", "if", "classes", "is", "None", "and", "is_listy", "(", "label_cols", ")", "and", "len", "(", "label_cols", ")", ">", "1", ":", "classes", "=", "label_cols", "src", "=", "ItemLists", "(", "path", ",", "TextList", ".", "from_df", "(", "train_df", ",", "path", ",", "cols", "=", "text_cols", ",", "processor", "=", "processor", ")", ",", "TextList", ".", "from_df", "(", "valid_df", ",", "path", ",", "cols", "=", "text_cols", ",", "processor", "=", "processor", ")", ")", "if", "cls", "==", "TextLMDataBunch", ":", "src", "=", "src", ".", "label_for_lm", "(", ")", "else", ":", "if", "label_delim", "is", "not", "None", ":", "src", "=", "src", ".", "label_from_df", "(", "cols", "=", "label_cols", ",", "classes", "=", "classes", ",", "label_delim", "=", "label_delim", ")", "else", ":", "src", "=", "src", ".", "label_from_df", "(", "cols", "=", "label_cols", ",", "classes", "=", "classes", ")", "if", "test_df", "is", "not", "None", ":", "src", ".", "add_test", "(", "TextList", ".", "from_df", "(", "test_df", ",", "path", ",", "cols", "=", "text_cols", ")", ")", "return", "src", ".", "databunch", "(", "*", "*", "kwargs", ")" ]
Create a `TextDataBunch` from DataFrames. `kwargs` are passed to the dataloader creation.
[ "Create", "a", "TextDataBunch", "from", "DataFrames", ".", "kwargs", "are", "passed", "to", "the", "dataloader", "creation", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L190-L206
20,818
fastai/fastai
fastai/text/data.py
TextDataBunch.from_csv
def from_csv(cls, path:PathOrStr, csv_name, valid_pct:float=0.2, test:Optional[str]=None,
             tokenizer:Tokenizer=None, vocab:Vocab=None, classes:Collection[str]=None, delimiter:str=None, header='infer',
             text_cols:IntsOrStrs=1, label_cols:IntsOrStrs=0, label_delim:str=None,
             chunksize:int=10000, max_vocab:int=60000, min_freq:int=2,
             mark_fields:bool=False, include_bos:bool=True, include_eos:bool=False, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from texts in csv files. `kwargs` are passed to the dataloader creation."
    df = pd.read_csv(Path(path)/csv_name, header=header, delimiter=delimiter)
    df = df.iloc[np.random.permutation(len(df))]
    cut = int(valid_pct * len(df)) + 1
    train_df, valid_df = df[cut:], df[:cut]
    test_df = None if test is None else pd.read_csv(Path(path)/test, header=header, delimiter=delimiter)
    return cls.from_df(path, train_df, valid_df, test_df, tokenizer=tokenizer, vocab=vocab, classes=classes,
                       text_cols=text_cols, label_cols=label_cols, label_delim=label_delim,
                       chunksize=chunksize, max_vocab=max_vocab, min_freq=min_freq, mark_fields=mark_fields,
                       include_bos=include_bos, include_eos=include_eos, **kwargs)
python
def from_csv(cls, path:PathOrStr, csv_name, valid_pct:float=0.2, test:Optional[str]=None,
             tokenizer:Tokenizer=None, vocab:Vocab=None, classes:Collection[str]=None, delimiter:str=None, header='infer',
             text_cols:IntsOrStrs=1, label_cols:IntsOrStrs=0, label_delim:str=None,
             chunksize:int=10000, max_vocab:int=60000, min_freq:int=2,
             mark_fields:bool=False, include_bos:bool=True, include_eos:bool=False, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from texts in csv files. `kwargs` are passed to the dataloader creation."
    df = pd.read_csv(Path(path)/csv_name, header=header, delimiter=delimiter)
    df = df.iloc[np.random.permutation(len(df))]
    cut = int(valid_pct * len(df)) + 1
    train_df, valid_df = df[cut:], df[:cut]
    test_df = None if test is None else pd.read_csv(Path(path)/test, header=header, delimiter=delimiter)
    return cls.from_df(path, train_df, valid_df, test_df, tokenizer=tokenizer, vocab=vocab, classes=classes,
                       text_cols=text_cols, label_cols=label_cols, label_delim=label_delim,
                       chunksize=chunksize, max_vocab=max_vocab, min_freq=min_freq, mark_fields=mark_fields,
                       include_bos=include_bos, include_eos=include_eos, **kwargs)
[ "def", "from_csv", "(", "cls", ",", "path", ":", "PathOrStr", ",", "csv_name", ",", "valid_pct", ":", "float", "=", "0.2", ",", "test", ":", "Optional", "[", "str", "]", "=", "None", ",", "tokenizer", ":", "Tokenizer", "=", "None", ",", "vocab", ":", "Vocab", "=", "None", ",", "classes", ":", "Collection", "[", "str", "]", "=", "None", ",", "delimiter", ":", "str", "=", "None", ",", "header", "=", "'infer'", ",", "text_cols", ":", "IntsOrStrs", "=", "1", ",", "label_cols", ":", "IntsOrStrs", "=", "0", ",", "label_delim", ":", "str", "=", "None", ",", "chunksize", ":", "int", "=", "10000", ",", "max_vocab", ":", "int", "=", "60000", ",", "min_freq", ":", "int", "=", "2", ",", "mark_fields", ":", "bool", "=", "False", ",", "include_bos", ":", "bool", "=", "True", ",", "include_eos", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ")", "->", "DataBunch", ":", "df", "=", "pd", ".", "read_csv", "(", "Path", "(", "path", ")", "/", "csv_name", ",", "header", "=", "header", ",", "delimiter", "=", "delimiter", ")", "df", "=", "df", ".", "iloc", "[", "np", ".", "random", ".", "permutation", "(", "len", "(", "df", ")", ")", "]", "cut", "=", "int", "(", "valid_pct", "*", "len", "(", "df", ")", ")", "+", "1", "train_df", ",", "valid_df", "=", "df", "[", "cut", ":", "]", ",", "df", "[", ":", "cut", "]", "test_df", "=", "None", "if", "test", "is", "None", "else", "pd", ".", "read_csv", "(", "Path", "(", "path", ")", "/", "test", ",", "header", "=", "header", ",", "delimiter", "=", "delimiter", ")", "return", "cls", ".", "from_df", "(", "path", ",", "train_df", ",", "valid_df", ",", "test_df", ",", "tokenizer", "=", "tokenizer", ",", "vocab", "=", "vocab", ",", "classes", "=", "classes", ",", "text_cols", "=", "text_cols", ",", "label_cols", "=", "label_cols", ",", "label_delim", "=", "label_delim", ",", "chunksize", "=", "chunksize", ",", "max_vocab", "=", "max_vocab", ",", "min_freq", "=", "min_freq", ",", "mark_fields", "=", "mark_fields", ",", "include_bos", "=", "include_bos", ",", "include_eos", "=", "include_eos", ",", "*", "*", "kwargs", ")" ]
Create a `TextDataBunch` from texts in csv files. `kwargs` are passed to the dataloader creation.
[ "Create", "a", "TextDataBunch", "from", "texts", "in", "csv", "files", ".", "kwargs", "are", "passed", "to", "the", "dataloader", "creation", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L209-L223
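A hedged usage sketch for from_csv: TextClasDataBunch is the classification subclass of TextDataBunch in fastai v1, and the file name and column names below are assumptions matching the defaults above (label in the first column, text in the second):

import pandas as pd
from fastai.text import TextClasDataBunch   # a TextDataBunch subclass

# tiny labelled csv in the assumed layout
pd.DataFrame({'label': ['pos', 'neg'] * 50,
              'text':  ['great film', 'dull film'] * 50}).to_csv('texts.csv', index=False)

data = TextClasDataBunch.from_csv(
    '.', 'texts.csv',
    valid_pct=0.2,                 # 20% of the shuffled rows become the validation set
    text_cols='text', label_cols='label',
)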
20,819
fastai/fastai
fastai/text/data.py
TextDataBunch.from_folder
def from_folder(cls, path:PathOrStr, train:str='train', valid:str='valid', test:Optional[str]=None,
                classes:Collection[Any]=None, tokenizer:Tokenizer=None, vocab:Vocab=None, chunksize:int=10000,
                max_vocab:int=60000, min_freq:int=2, mark_fields:bool=False,
                include_bos:bool=True, include_eos:bool=False, **kwargs):
    "Create a `TextDataBunch` from text files in folders."
    path = Path(path).absolute()
    processor = [OpenFileProcessor()] + _get_processor(tokenizer=tokenizer, vocab=vocab, chunksize=chunksize,
                 max_vocab=max_vocab, min_freq=min_freq, mark_fields=mark_fields,
                 include_bos=include_bos, include_eos=include_eos)
    src = (TextList.from_folder(path, processor=processor)
                   .split_by_folder(train=train, valid=valid))
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_folder(classes=classes)
    if test is not None: src.add_test_folder(path/test)
    return src.databunch(**kwargs)
python
def from_folder(cls, path:PathOrStr, train:str='train', valid:str='valid', test:Optional[str]=None,
                classes:Collection[Any]=None, tokenizer:Tokenizer=None, vocab:Vocab=None, chunksize:int=10000,
                max_vocab:int=60000, min_freq:int=2, mark_fields:bool=False,
                include_bos:bool=True, include_eos:bool=False, **kwargs):
    "Create a `TextDataBunch` from text files in folders."
    path = Path(path).absolute()
    processor = [OpenFileProcessor()] + _get_processor(tokenizer=tokenizer, vocab=vocab, chunksize=chunksize,
                 max_vocab=max_vocab, min_freq=min_freq, mark_fields=mark_fields,
                 include_bos=include_bos, include_eos=include_eos)
    src = (TextList.from_folder(path, processor=processor)
                   .split_by_folder(train=train, valid=valid))
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_folder(classes=classes)
    if test is not None: src.add_test_folder(path/test)
    return src.databunch(**kwargs)
[ "def", "from_folder", "(", "cls", ",", "path", ":", "PathOrStr", ",", "train", ":", "str", "=", "'train'", ",", "valid", ":", "str", "=", "'valid'", ",", "test", ":", "Optional", "[", "str", "]", "=", "None", ",", "classes", ":", "Collection", "[", "Any", "]", "=", "None", ",", "tokenizer", ":", "Tokenizer", "=", "None", ",", "vocab", ":", "Vocab", "=", "None", ",", "chunksize", ":", "int", "=", "10000", ",", "max_vocab", ":", "int", "=", "60000", ",", "min_freq", ":", "int", "=", "2", ",", "mark_fields", ":", "bool", "=", "False", ",", "include_bos", ":", "bool", "=", "True", ",", "include_eos", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ")", ":", "path", "=", "Path", "(", "path", ")", ".", "absolute", "(", ")", "processor", "=", "[", "OpenFileProcessor", "(", ")", "]", "+", "_get_processor", "(", "tokenizer", "=", "tokenizer", ",", "vocab", "=", "vocab", ",", "chunksize", "=", "chunksize", ",", "max_vocab", "=", "max_vocab", ",", "min_freq", "=", "min_freq", ",", "mark_fields", "=", "mark_fields", ",", "include_bos", "=", "include_bos", ",", "include_eos", "=", "include_eos", ")", "src", "=", "(", "TextList", ".", "from_folder", "(", "path", ",", "processor", "=", "processor", ")", ".", "split_by_folder", "(", "train", "=", "train", ",", "valid", "=", "valid", ")", ")", "src", "=", "src", ".", "label_for_lm", "(", ")", "if", "cls", "==", "TextLMDataBunch", "else", "src", ".", "label_from_folder", "(", "classes", "=", "classes", ")", "if", "test", "is", "not", "None", ":", "src", ".", "add_test_folder", "(", "path", "/", "test", ")", "return", "src", ".", "databunch", "(", "*", "*", "kwargs", ")" ]
Create a `TextDataBunch` from text files in folders.
[ "Create", "a", "TextDataBunch", "from", "text", "files", "in", "folders", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L226-L237
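A minimal usage sketch for the `from_folder` record above; the folder layout and the `data/imdb_sample` path are illustrative assumptions, not part of the record:

# Assumed layout: data/imdb_sample/{train,valid}/{neg,pos}/*.txt
from fastai.text import TextClasDataBunch, TextLMDataBunch

# Classifier data: labels come from the class subfolder names
data_clas = TextClasDataBunch.from_folder('data/imdb_sample', classes=['neg','pos'], bs=32)
# Language-model data: same files, but labelled for next-token prediction
data_lm = TextLMDataBunch.from_folder('data/imdb_sample', bs=32)

Extra kwargs such as `bs` are forwarded to the final `.databunch()` call.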
20,820
fastai/fastai
fastai/text/data.py
TextList.label_for_lm
def label_for_lm(self, **kwargs):
        "A special labelling method for language models."
        self.__class__ = LMTextList
        kwargs['label_cls'] = LMLabelList
        return self.label_const(0, **kwargs)
python
def label_for_lm(self, **kwargs):
        "A special labelling method for language models."
        self.__class__ = LMTextList
        kwargs['label_cls'] = LMLabelList
        return self.label_const(0, **kwargs)
[ "def", "label_for_lm", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "__class__", "=", "LMTextList", "kwargs", "[", "'label_cls'", "]", "=", "LMLabelList", "return", "self", ".", "label_const", "(", "0", ",", "*", "*", "kwargs", ")" ]
A special labelling method for language models.
[ "A", "special", "labelling", "method", "for", "language", "models", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L330-L334
20,821
fastai/fastai
fastai/text/data.py
TextList.from_folder
def from_folder(cls, path:PathOrStr='.', extensions:Collection[str]=text_extensions, vocab:Vocab=None,
                processor:PreProcessor=None, **kwargs)->'TextList':
        "Get the list of files in `path` that have a text suffix. `recurse` determines if we search subfolders."
        processor = ifnone(processor, [OpenFileProcessor(), TokenizeProcessor(), NumericalizeProcessor(vocab=vocab)])
        return super().from_folder(path=path, extensions=extensions, processor=processor, **kwargs)
python
def from_folder(cls, path:PathOrStr='.', extensions:Collection[str]=text_extensions, vocab:Vocab=None,
                processor:PreProcessor=None, **kwargs)->'TextList':
        "Get the list of files in `path` that have a text suffix. `recurse` determines if we search subfolders."
        processor = ifnone(processor, [OpenFileProcessor(), TokenizeProcessor(), NumericalizeProcessor(vocab=vocab)])
        return super().from_folder(path=path, extensions=extensions, processor=processor, **kwargs)
[ "def", "from_folder", "(", "cls", ",", "path", ":", "PathOrStr", "=", "'.'", ",", "extensions", ":", "Collection", "[", "str", "]", "=", "text_extensions", ",", "vocab", ":", "Vocab", "=", "None", ",", "processor", ":", "PreProcessor", "=", "None", ",", "*", "*", "kwargs", ")", "->", "'TextList'", ":", "processor", "=", "ifnone", "(", "processor", ",", "[", "OpenFileProcessor", "(", ")", ",", "TokenizeProcessor", "(", ")", ",", "NumericalizeProcessor", "(", "vocab", "=", "vocab", ")", "]", ")", "return", "super", "(", ")", ".", "from_folder", "(", "path", "=", "path", ",", "extensions", "=", "extensions", ",", "processor", "=", "processor", ",", "*", "*", "kwargs", ")" ]
Get the list of files in `path` that have a text suffix. `recurse` determines if we search subfolders.
[ "Get", "the", "list", "of", "files", "in", "path", "that", "have", "a", "text", "suffix", ".", "recurse", "determines", "if", "we", "search", "subfolders", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/text/data.py#L342-L346
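The two records above combine naturally in the data-block API; a sketch, with the path and split fraction assumed for illustration:

from fastai.text import TextList

data_lm = (TextList.from_folder('data/wiki')   # assumed folder of .txt files
           .split_by_rand_pct(0.1)             # hold out 10% for validation
           .label_for_lm()                     # targets are the shifted inputs
           .databunch(bs=64))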
20,822
fastai/fastai
old/fastai/conv_learner.py
ConvLearner.predict_array
def predict_array(self, arr):
        """
        This over-ride is necessary because otherwise the learner method accesses the wrong model
        when it is called with precompute set to true

        Args:
            arr: a numpy array to be used as input to the model for prediction purposes
        Returns:
            a numpy array containing the predictions from the model
        """
        precompute = self.precompute
        self.precompute = False
        pred = super().predict_array(arr)
        self.precompute = precompute
        return pred
python
def predict_array(self, arr):
        """
        This over-ride is necessary because otherwise the learner method accesses the wrong model
        when it is called with precompute set to true

        Args:
            arr: a numpy array to be used as input to the model for prediction purposes
        Returns:
            a numpy array containing the predictions from the model
        """
        precompute = self.precompute
        self.precompute = False
        pred = super().predict_array(arr)
        self.precompute = precompute
        return pred
[ "def", "predict_array", "(", "self", ",", "arr", ")", ":", "precompute", "=", "self", ".", "precompute", "self", ".", "precompute", "=", "False", "pred", "=", "super", "(", ")", ".", "predict_array", "(", "arr", ")", "self", ".", "precompute", "=", "precompute", "return", "pred" ]
This over-ride is necessary because otherwise the learner method accesses the wrong model
when it is called with precompute set to true

Args:
    arr: a numpy array to be used as input to the model for prediction purposes
Returns:
    a numpy array containing the predictions from the model
[ "This", "over", "-", "ride", "is", "necessary", "because", "otherwise", "the", "learner", "method", "accesses", "the", "wrong", "model", "when", "it", "is", "called", "with", "precompute", "set", "to", "true" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/conv_learner.py#L211-L225
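A sketch of why the toggle matters, using the old fastai API from the same era; the architecture, transforms, and image path are assumptions for illustration:

# With precompute=True the learner normally runs only the head on cached
# activations; predict_array must see raw pixels, so it flips the flag off.
learn = ConvLearner.pretrained(resnet34, data, precompute=True)
trn_tfms, val_tfms = tfms_from_model(resnet34, 224)
im = val_tfms(open_image('data/cat.jpg'))     # assumed helper + placeholder path
preds = learn.predict_array(im[None])         # add a batch dimension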
20,823
fastai/fastai
fastai/callbacks/hooks.py
hook_output
def hook_output (module:nn.Module, detach:bool=True, grad:bool=False)->Hook:
    "Return a `Hook` that stores activations of `module` in `self.stored`"
    return Hook(module, _hook_inner, detach=detach, is_forward=not grad)
python
def hook_output (module:nn.Module, detach:bool=True, grad:bool=False)->Hook:
    "Return a `Hook` that stores activations of `module` in `self.stored`"
    return Hook(module, _hook_inner, detach=detach, is_forward=not grad)
[ "def", "hook_output", "(", "module", ":", "nn", ".", "Module", ",", "detach", ":", "bool", "=", "True", ",", "grad", ":", "bool", "=", "False", ")", "->", "Hook", ":", "return", "Hook", "(", "module", ",", "_hook_inner", ",", "detach", "=", "detach", ",", "is_forward", "=", "not", "grad", ")" ]
Return a `Hook` that stores activations of `module` in `self.stored`
[ "Return", "a", "Hook", "that", "stores", "activations", "of", "module", "in", "self", ".", "stored" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L54-L56
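A short usage sketch for `hook_output`; `Hook` also works as a context manager (see `remove` further down), and the layer indexing and the names `learn`/`xb` are assumptions:

m = learn.model
with hook_output(m[0]) as hook:     # capture the body's final activations
    preds = m(xb)                   # xb: a batch on the same device as m
acts = hook.stored                  # detached tensor, kept after the hook is removed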
20,824
fastai/fastai
fastai/callbacks/hooks.py
hook_outputs
def hook_outputs(modules:Collection[nn.Module], detach:bool=True, grad:bool=False)->Hooks:
    "Return `Hooks` that store activations of all `modules` in `self.stored`"
    return Hooks(modules, _hook_inner, detach=detach, is_forward=not grad)
python
def hook_outputs(modules:Collection[nn.Module], detach:bool=True, grad:bool=False)->Hooks:
    "Return `Hooks` that store activations of all `modules` in `self.stored`"
    return Hooks(modules, _hook_inner, detach=detach, is_forward=not grad)
[ "def", "hook_outputs", "(", "modules", ":", "Collection", "[", "nn", ".", "Module", "]", ",", "detach", ":", "bool", "=", "True", ",", "grad", ":", "bool", "=", "False", ")", "->", "Hooks", ":", "return", "Hooks", "(", "modules", ",", "_hook_inner", ",", "detach", "=", "detach", ",", "is_forward", "=", "not", "grad", ")" ]
Return `Hooks` that store activations of all `modules` in `self.stored`
[ "Return", "Hooks", "that", "store", "activations", "of", "all", "modules", "in", "self", ".", "stored" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L58-L60
20,825
fastai/fastai
fastai/callbacks/hooks.py
dummy_batch
def dummy_batch(m: nn.Module, size:tuple=(64,64))->Tensor:
    "Create a dummy batch to go through `m` with `size`."
    ch_in = in_channels(m)
    return one_param(m).new(1, ch_in, *size).requires_grad_(False).uniform_(-1.,1.)
python
def dummy_batch(m: nn.Module, size:tuple=(64,64))->Tensor:
    "Create a dummy batch to go through `m` with `size`."
    ch_in = in_channels(m)
    return one_param(m).new(1, ch_in, *size).requires_grad_(False).uniform_(-1.,1.)
[ "def", "dummy_batch", "(", "m", ":", "nn", ".", "Module", ",", "size", ":", "tuple", "=", "(", "64", ",", "64", ")", ")", "->", "Tensor", ":", "ch_in", "=", "in_channels", "(", "m", ")", "return", "one_param", "(", "m", ")", ".", "new", "(", "1", ",", "ch_in", ",", "*", "size", ")", ".", "requires_grad_", "(", "False", ")", ".", "uniform_", "(", "-", "1.", ",", "1.", ")" ]
Create a dummy batch to go through `m` with `size`.
[ "Create", "a", "dummy", "batch", "to", "go", "through", "m", "with", "size", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L101-L104
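The shape logic is worth seeing once: `one_param` supplies the device/dtype and `in_channels` the expected input channels. A sketch, with `learn` assumed in scope:

xb = dummy_batch(learn.model, size=(128,128))
print(xb.shape)                           # torch.Size([1, 3, 128, 128]) for a 3-channel model
print(xb.min().item(), xb.max().item())   # values drawn uniformly from [-1, 1]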
20,826
fastai/fastai
fastai/callbacks/hooks.py
dummy_eval
def dummy_eval(m:nn.Module, size:tuple=(64,64)):
    "Pass a `dummy_batch` in evaluation mode in `m` with `size`."
    return m.eval()(dummy_batch(m, size))
python
def dummy_eval(m:nn.Module, size:tuple=(64,64)):
    "Pass a `dummy_batch` in evaluation mode in `m` with `size`."
    return m.eval()(dummy_batch(m, size))
[ "def", "dummy_eval", "(", "m", ":", "nn", ".", "Module", ",", "size", ":", "tuple", "=", "(", "64", ",", "64", ")", ")", ":", "return", "m", ".", "eval", "(", ")", "(", "dummy_batch", "(", "m", ",", "size", ")", ")" ]
Pass a `dummy_batch` in evaluation mode in `m` with `size`.
[ "Pass", "a", "dummy_batch", "in", "evaluation", "mode", "in", "m", "with", "size", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L106-L108
20,827
fastai/fastai
fastai/callbacks/hooks.py
model_sizes
def model_sizes(m:nn.Module, size:tuple=(64,64))->Tuple[Sizes,Tensor,Hooks]:
    "Pass a dummy input through the model `m` to get the various sizes of activations."
    with hook_outputs(m) as hooks:
        x = dummy_eval(m, size)
        return [o.stored.shape for o in hooks]
python
def model_sizes(m:nn.Module, size:tuple=(64,64))->Tuple[Sizes,Tensor,Hooks]:
    "Pass a dummy input through the model `m` to get the various sizes of activations."
    with hook_outputs(m) as hooks:
        x = dummy_eval(m, size)
        return [o.stored.shape for o in hooks]
[ "def", "model_sizes", "(", "m", ":", "nn", ".", "Module", ",", "size", ":", "tuple", "=", "(", "64", ",", "64", ")", ")", "->", "Tuple", "[", "Sizes", ",", "Tensor", ",", "Hooks", "]", ":", "with", "hook_outputs", "(", "m", ")", "as", "hooks", ":", "x", "=", "dummy_eval", "(", "m", ",", "size", ")", "return", "[", "o", ".", "stored", ".", "shape", "for", "o", "in", "hooks", "]" ]
Pass a dummy input through the model `m` to get the various sizes of activations.
[ "Pass", "a", "dummy", "input", "through", "the", "model", "m", "to", "get", "the", "various", "sizes", "of", "activations", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L110-L114
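A usage sketch with a resnet body; `create_body` is assumed here from `fastai.vision`:

from fastai.vision import create_body, models

body = create_body(models.resnet18)
print(model_sizes(body, size=(64,64)))
# e.g. [..., torch.Size([1, 256, 4, 4]), torch.Size([1, 512, 2, 2])]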
20,828
fastai/fastai
fastai/callbacks/hooks.py
num_features_model
def num_features_model(m:nn.Module)->int:
    "Return the number of output features for `model`."
    sz = 64
    while True:
        try: return model_sizes(m, size=(sz,sz))[-1][1]
        except Exception as e:
            sz *= 2
            if sz > 2048: raise
python
def num_features_model(m:nn.Module)->int:
    "Return the number of output features for `model`."
    sz = 64
    while True:
        try: return model_sizes(m, size=(sz,sz))[-1][1]
        except Exception as e:
            sz *= 2
            if sz > 2048: raise
[ "def", "num_features_model", "(", "m", ":", "nn", ".", "Module", ")", "->", "int", ":", "sz", "=", "64", "while", "True", ":", "try", ":", "return", "model_sizes", "(", "m", ",", "size", "=", "(", "sz", ",", "sz", ")", ")", "[", "-", "1", "]", "[", "1", "]", "except", "Exception", "as", "e", ":", "sz", "*=", "2", "if", "sz", ">", "2048", ":", "raise" ]
Return the number of output features for `model`.
[ "Return", "the", "number", "of", "output", "features", "for", "model", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L116-L123
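The retry loop exists because some bodies downsample too aggressively for a 64-pixel probe and only succeed at a larger input. Usage is one line; pairing it with `create_head` as below is an assumed follow-on, not taken from the record:

nf = num_features_model(body)          # 512 for a resnet18 body
head = create_head(nf * 2, nc=10)      # doubled nf to account for concat pooling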
20,829
fastai/fastai
fastai/callbacks/hooks.py
model_summary
def model_summary(m:Learner, n:int=70):
    "Print a summary of `m` using an output text width of `n` chars"
    info = layers_info(m)
    header = ["Layer (type)", "Output Shape", "Param #", "Trainable"]
    res = "=" * n + "\n"
    res += f"{header[0]:<20} {header[1]:<20} {header[2]:<10} {header[3]:<10}\n"
    res += "=" * n + "\n"
    total_params = 0
    total_trainable_params = 0
    for layer, size, params, trainable in info:
        if size is None: continue
        total_params += int(params)
        total_trainable_params += int(params) * trainable
        size, trainable = str(list(size)), str(trainable)
        res += f"{layer:<20} {size:<20} {int(params):<10,} {trainable:<10}\n"
    res += "_" * n + "\n"
    res += f"\nTotal params: {total_params:,}\n"
    res += f"Total trainable params: {total_trainable_params:,}\n"
    res += f"Total non-trainable params: {total_params - total_trainable_params:,}\n"
    return PrettyString(res)
python
def model_summary(m:Learner, n:int=70):
    "Print a summary of `m` using an output text width of `n` chars"
    info = layers_info(m)
    header = ["Layer (type)", "Output Shape", "Param #", "Trainable"]
    res = "=" * n + "\n"
    res += f"{header[0]:<20} {header[1]:<20} {header[2]:<10} {header[3]:<10}\n"
    res += "=" * n + "\n"
    total_params = 0
    total_trainable_params = 0
    for layer, size, params, trainable in info:
        if size is None: continue
        total_params += int(params)
        total_trainable_params += int(params) * trainable
        size, trainable = str(list(size)), str(trainable)
        res += f"{layer:<20} {size:<20} {int(params):<10,} {trainable:<10}\n"
    res += "_" * n + "\n"
    res += f"\nTotal params: {total_params:,}\n"
    res += f"Total trainable params: {total_trainable_params:,}\n"
    res += f"Total non-trainable params: {total_params - total_trainable_params:,}\n"
    return PrettyString(res)
[ "def", "model_summary", "(", "m", ":", "Learner", ",", "n", ":", "int", "=", "70", ")", ":", "info", "=", "layers_info", "(", "m", ")", "header", "=", "[", "\"Layer (type)\"", ",", "\"Output Shape\"", ",", "\"Param #\"", ",", "\"Trainable\"", "]", "res", "=", "\"=\"", "*", "n", "+", "\"\\n\"", "res", "+=", "f\"{header[0]:<20} {header[1]:<20} {header[2]:<10} {header[3]:<10}\\n\"", "res", "+=", "\"=\"", "*", "n", "+", "\"\\n\"", "total_params", "=", "0", "total_trainable_params", "=", "0", "for", "layer", ",", "size", ",", "params", ",", "trainable", "in", "info", ":", "if", "size", "is", "None", ":", "continue", "total_params", "+=", "int", "(", "params", ")", "total_trainable_params", "+=", "int", "(", "params", ")", "*", "trainable", "size", ",", "trainable", "=", "str", "(", "list", "(", "size", ")", ")", ",", "str", "(", "trainable", ")", "res", "+=", "f\"{layer:<20} {size:<20} {int(params):<10,} {trainable:<10}\\n\"", "res", "+=", "\"_\"", "*", "n", "+", "\"\\n\"", "res", "+=", "f\"\\nTotal params: {total_params:,}\\n\"", "res", "+=", "f\"Total trainable params: {total_trainable_params:,}\\n\"", "res", "+=", "f\"Total non-trainable params: {total_params - total_trainable_params:,}\\n\"", "return", "PrettyString", "(", "res", ")" ]
Print a summary of `m` using an output text width of `n` chars
[ "Print", "a", "summary", "of", "m", "using", "a", "output", "text", "width", "of", "n", "chars" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L165-L184
20,830
fastai/fastai
fastai/callbacks/hooks.py
Hook.hook_fn
def hook_fn(self, module:nn.Module, input:Tensors, output:Tensors):
        "Applies `hook_func` to `module`, `input`, `output`."
        if self.detach:
            input  = (o.detach() for o in input ) if is_listy(input ) else input.detach()
            output = (o.detach() for o in output) if is_listy(output) else output.detach()
        self.stored = self.hook_func(module, input, output)
python
def hook_fn(self, module:nn.Module, input:Tensors, output:Tensors):
        "Applies `hook_func` to `module`, `input`, `output`."
        if self.detach:
            input  = (o.detach() for o in input ) if is_listy(input ) else input.detach()
            output = (o.detach() for o in output) if is_listy(output) else output.detach()
        self.stored = self.hook_func(module, input, output)
[ "def", "hook_fn", "(", "self", ",", "module", ":", "nn", ".", "Module", ",", "input", ":", "Tensors", ",", "output", ":", "Tensors", ")", ":", "if", "self", ".", "detach", ":", "input", "=", "(", "o", ".", "detach", "(", ")", "for", "o", "in", "input", ")", "if", "is_listy", "(", "input", ")", "else", "input", ".", "detach", "(", ")", "output", "=", "(", "o", ".", "detach", "(", ")", "for", "o", "in", "output", ")", "if", "is_listy", "(", "output", ")", "else", "output", ".", "detach", "(", ")", "self", ".", "stored", "=", "self", ".", "hook_func", "(", "module", ",", "input", ",", "output", ")" ]
Applies `hook_func` to `module`, `input`, `output`.
[ "Applies", "hook_func", "to", "module", "input", "output", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L18-L23
20,831
fastai/fastai
fastai/callbacks/hooks.py
Hook.remove
def remove(self):
        "Remove the hook from the model."
        if not self.removed:
            self.hook.remove()
            self.removed=True
python
def remove(self):
        "Remove the hook from the model."
        if not self.removed:
            self.hook.remove()
            self.removed=True
[ "def", "remove", "(", "self", ")", ":", "if", "not", "self", ".", "removed", ":", "self", ".", "hook", ".", "remove", "(", ")", "self", ".", "removed", "=", "True" ]
Remove the hook from the model.
[ "Remove", "the", "hook", "from", "the", "model", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L25-L29
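A sketch tying `hook_fn` and `remove` together; `m` and `xb` are assumed to be any module and a matching batch:

# The custom hook function receives (module, input, output); whatever it
# returns is kept on the Hook in `stored`.
h = Hook(m, lambda mod, inp, out: out.shape)
_ = m(xb)
print(h.stored)   # torch.Size([...])
h.remove()        # idempotent: the `removed` flag guards a second call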
20,832
fastai/fastai
fastai/callbacks/hooks.py
HookCallback.on_train_begin
def on_train_begin(self, **kwargs):
        "Register the `Hooks` on `self.modules`."
        if not self.modules:
            self.modules = [m for m in flatten_model(self.learn.model)
                            if hasattr(m, 'weight')]
        self.hooks = Hooks(self.modules, self.hook)
python
def on_train_begin(self, **kwargs):
        "Register the `Hooks` on `self.modules`."
        if not self.modules:
            self.modules = [m for m in flatten_model(self.learn.model)
                            if hasattr(m, 'weight')]
        self.hooks = Hooks(self.modules, self.hook)
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "modules", ":", "self", ".", "modules", "=", "[", "m", "for", "m", "in", "flatten_model", "(", "self", ".", "learn", ".", "model", ")", "if", "hasattr", "(", "m", ",", "'weight'", ")", "]", "self", ".", "hooks", "=", "Hooks", "(", "self", ".", "modules", ",", "self", ".", "hook", ")" ]
Register the `Hooks` on `self.modules`.
[ "Register", "the", "Hooks", "on", "self", ".", "modules", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L68-L73
20,833
fastai/fastai
fastai/callbacks/hooks.py
ActivationStats.hook
def hook(self, m:nn.Module, i:Tensors, o:Tensors)->Tuple[Rank0Tensor,Rank0Tensor]:
        "Take the mean and std of `o`."
        return o.mean().item(),o.std().item()
python
def hook(self, m:nn.Module, i:Tensors, o:Tensors)->Tuple[Rank0Tensor,Rank0Tensor]:
        "Take the mean and std of `o`."
        return o.mean().item(),o.std().item()
[ "def", "hook", "(", "self", ",", "m", ":", "nn", ".", "Module", ",", "i", ":", "Tensors", ",", "o", ":", "Tensors", ")", "->", "Tuple", "[", "Rank0Tensor", ",", "Rank0Tensor", "]", ":", "return", "o", ".", "mean", "(", ")", ".", "item", "(", ")", ",", "o", ".", "std", "(", ")", ".", "item", "(", ")" ]
Take the mean and std of `o`.
[ "Take", "the", "mean", "and", "std", "of", "o", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L91-L93
20,834
fastai/fastai
fastai/callbacks/hooks.py
ActivationStats.on_batch_end
def on_batch_end(self, train, **kwargs):
        "Take the stored results and put them in `self.stats`"
        if train: self.stats.append(self.hooks.stored)
python
def on_batch_end(self, train, **kwargs):
        "Take the stored results and put them in `self.stats`"
        if train: self.stats.append(self.hooks.stored)
[ "def", "on_batch_end", "(", "self", ",", "train", ",", "*", "*", "kwargs", ")", ":", "if", "train", ":", "self", ".", "stats", ".", "append", "(", "self", ".", "hooks", ".", "stored", ")" ]
Take the stored results and put them in `self.stats`
[ "Take", "the", "stored", "results", "and", "puts", "it", "in", "self", ".", "stats" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/hooks.py#L94-L96
20,835
fastai/fastai
old/fastai/plots.py
plots_from_files
def plots_from_files(imspaths, figsize=(10,5), rows=1, titles=None, maintitle=None):
    """Plots images given image files.

    Arguments:
        im_paths (list): list of paths
        figsize (tuple): figure size
        rows (int): number of rows
        titles (list): list of titles
        maintitle (string): main title
    """
    f = plt.figure(figsize=figsize)
    if maintitle is not None: plt.suptitle(maintitle, fontsize=16)
    for i in range(len(imspaths)):
        sp = f.add_subplot(rows, ceildiv(len(imspaths), rows), i+1)
        sp.axis('Off')
        if titles is not None: sp.set_title(titles[i], fontsize=16)
        img = plt.imread(imspaths[i])
        plt.imshow(img)
python
def plots_from_files(imspaths, figsize=(10,5), rows=1, titles=None, maintitle=None):
    """Plots images given image files.

    Arguments:
        im_paths (list): list of paths
        figsize (tuple): figure size
        rows (int): number of rows
        titles (list): list of titles
        maintitle (string): main title
    """
    f = plt.figure(figsize=figsize)
    if maintitle is not None: plt.suptitle(maintitle, fontsize=16)
    for i in range(len(imspaths)):
        sp = f.add_subplot(rows, ceildiv(len(imspaths), rows), i+1)
        sp.axis('Off')
        if titles is not None: sp.set_title(titles[i], fontsize=16)
        img = plt.imread(imspaths[i])
        plt.imshow(img)
[ "def", "plots_from_files", "(", "imspaths", ",", "figsize", "=", "(", "10", ",", "5", ")", ",", "rows", "=", "1", ",", "titles", "=", "None", ",", "maintitle", "=", "None", ")", ":", "f", "=", "plt", ".", "figure", "(", "figsize", "=", "figsize", ")", "if", "maintitle", "is", "not", "None", ":", "plt", ".", "suptitle", "(", "maintitle", ",", "fontsize", "=", "16", ")", "for", "i", "in", "range", "(", "len", "(", "imspaths", ")", ")", ":", "sp", "=", "f", ".", "add_subplot", "(", "rows", ",", "ceildiv", "(", "len", "(", "imspaths", ")", ",", "rows", ")", ",", "i", "+", "1", ")", "sp", ".", "axis", "(", "'Off'", ")", "if", "titles", "is", "not", "None", ":", "sp", ".", "set_title", "(", "titles", "[", "i", "]", ",", "fontsize", "=", "16", ")", "img", "=", "plt", ".", "imread", "(", "imspaths", "[", "i", "]", ")", "plt", ".", "imshow", "(", "img", ")" ]
Plots images given image files.

Arguments:
    im_paths (list): list of paths
    figsize (tuple): figure size
    rows (int): number of rows
    titles (list): list of titles
    maintitle (string): main title
[ "Plots", "images", "given", "image", "files", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/plots.py#L22-L39
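A minimal call, with placeholder paths:

plots_from_files(['imgs/cat.jpg', 'imgs/dog.jpg', 'imgs/fox.jpg'],
                 figsize=(9,3), rows=1,
                 titles=['cat', 'dog', 'fox'], maintitle='Samples')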
20,836
fastai/fastai
old/fastai/plots.py
ImageModelResults.plot_val_with_title
def plot_val_with_title(self, idxs, y):
        """ Displays the images and their probabilities of belonging to a certain class

            Arguments:
                idxs (numpy.ndarray): indexes of the image samples from the dataset
                y (int): the selected class

            Returns:
                Plots the images in n rows [rows = n]
        """
        # if there are any samples to be displayed
        if len(idxs) > 0:
            imgs = np.stack([self.ds[x][0] for x in idxs])
            title_probs = [self.probs[x,y] for x in idxs]
            return plots(self.ds.denorm(imgs), rows=1, titles=title_probs)
        # if idxs is empty return false
        else:
            return False
python
def plot_val_with_title(self, idxs, y):
        """ Displays the images and their probabilities of belonging to a certain class

            Arguments:
                idxs (numpy.ndarray): indexes of the image samples from the dataset
                y (int): the selected class

            Returns:
                Plots the images in n rows [rows = n]
        """
        # if there are any samples to be displayed
        if len(idxs) > 0:
            imgs = np.stack([self.ds[x][0] for x in idxs])
            title_probs = [self.probs[x,y] for x in idxs]
            return plots(self.ds.denorm(imgs), rows=1, titles=title_probs)
        # if idxs is empty return false
        else:
            return False
[ "def", "plot_val_with_title", "(", "self", ",", "idxs", ",", "y", ")", ":", "# if there are any samples to be displayed", "if", "len", "(", "idxs", ")", ">", "0", ":", "imgs", "=", "np", ".", "stack", "(", "[", "self", ".", "ds", "[", "x", "]", "[", "0", "]", "for", "x", "in", "idxs", "]", ")", "title_probs", "=", "[", "self", ".", "probs", "[", "x", ",", "y", "]", "for", "x", "in", "idxs", "]", "return", "plots", "(", "self", ".", "ds", ".", "denorm", "(", "imgs", ")", ",", "rows", "=", "1", ",", "titles", "=", "title_probs", ")", "# if idxs is empty return false", "else", ":", "return", "False" ]
Displays the images and their probabilities of belonging to a certain class

Arguments:
    idxs (numpy.ndarray): indexes of the image samples from the dataset
    y (int): the selected class

Returns:
    Plots the images in n rows [rows = n]
[ "Displays", "the", "images", "and", "their", "probabilities", "of", "belonging", "to", "a", "certain", "class" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/plots.py#L99-L117
20,837
fastai/fastai
old/fastai/plots.py
ImageModelResults.most_uncertain_by_mask
def most_uncertain_by_mask(self, mask, y):
        """ Extracts the first 4 most uncertain indexes from the ordered list of probabilities

            Arguments:
                mask (numpy.ndarray): the mask of probabilities specific to the selected class;
                    a boolean array with shape (num_of_samples,) which contains True where
                    class==selected_class, and False everywhere else
                y (int): the selected class

            Returns:
                idxs (ndarray): An array of indexes of length 4
        """
        idxs = np.where(mask)[0]
        # the most uncertain samples will have abs(probs-1/num_classes) close to 0;
        return idxs[np.argsort(np.abs(self.probs[idxs,y]-(1/self.num_classes)))[:4]]
python
def most_uncertain_by_mask(self, mask, y):
        """ Extracts the first 4 most uncertain indexes from the ordered list of probabilities

            Arguments:
                mask (numpy.ndarray): the mask of probabilities specific to the selected class;
                    a boolean array with shape (num_of_samples,) which contains True where
                    class==selected_class, and False everywhere else
                y (int): the selected class

            Returns:
                idxs (ndarray): An array of indexes of length 4
        """
        idxs = np.where(mask)[0]
        # the most uncertain samples will have abs(probs-1/num_classes) close to 0;
        return idxs[np.argsort(np.abs(self.probs[idxs,y]-(1/self.num_classes)))[:4]]
[ "def", "most_uncertain_by_mask", "(", "self", ",", "mask", ",", "y", ")", ":", "idxs", "=", "np", ".", "where", "(", "mask", ")", "[", "0", "]", "# the most uncertain samples will have abs(probs-1/num_classes) close to 0;", "return", "idxs", "[", "np", ".", "argsort", "(", "np", ".", "abs", "(", "self", ".", "probs", "[", "idxs", ",", "y", "]", "-", "(", "1", "/", "self", ".", "num_classes", ")", ")", ")", "[", ":", "4", "]", "]" ]
Extracts the first 4 most uncertain indexes from the ordered list of probabilities

Arguments:
    mask (numpy.ndarray): the mask of probabilities specific to the selected class;
        a boolean array with shape (num_of_samples,) which contains True where
        class==selected_class, and False everywhere else
    y (int): the selected class

Returns:
    idxs (ndarray): An array of indexes of length 4
[ "Extracts", "the", "first", "4", "most", "uncertain", "indexes", "from", "the", "ordered", "list", "of", "probabilities" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/plots.py#L134-L146
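The selection rule is easiest to see on toy numbers; a self-contained sketch of the same argsort:

import numpy as np

probs = np.array([0.90, 0.52, 0.35, 0.51, 0.05])   # binary case: 1/num_classes = 0.5
idxs = np.argsort(np.abs(probs - 0.5))[:4]
print(idxs)   # [3 1 2 0] -- 0.51 and 0.52 are the most uncertain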
20,838
fastai/fastai
fastai/launch.py
main
def main(
        gpus:Param("The GPUs to use for distributed training", str)='all',
        script:Param("Script to run", str, opt=False)='',
        args:Param("Args to pass to script", nargs='...', opt=False)=''
):
    "PyTorch distributed training launch helper that spawns multiple distributed processes"
    # Loosely based on torch.distributed.launch
    current_env = os.environ.copy()
    gpus = list(range(torch.cuda.device_count())) if gpus=='all' else list(gpus)
    current_env["WORLD_SIZE"] = str(len(gpus))
    current_env["MASTER_ADDR"] = '127.0.0.1'
    current_env["MASTER_PORT"] = '29500'

    processes = []
    for i,gpu in enumerate(gpus):
        current_env["RANK"] = str(i)
        cmd = [sys.executable, "-u", script, f"--gpu={gpu}"] + args
        process = subprocess.Popen(cmd, env=current_env)
        processes.append(process)

    for process in processes: process.wait()
python
def main(
        gpus:Param("The GPUs to use for distributed training", str)='all',
        script:Param("Script to run", str, opt=False)='',
        args:Param("Args to pass to script", nargs='...', opt=False)=''
):
    "PyTorch distributed training launch helper that spawns multiple distributed processes"
    # Loosely based on torch.distributed.launch
    current_env = os.environ.copy()
    gpus = list(range(torch.cuda.device_count())) if gpus=='all' else list(gpus)
    current_env["WORLD_SIZE"] = str(len(gpus))
    current_env["MASTER_ADDR"] = '127.0.0.1'
    current_env["MASTER_PORT"] = '29500'

    processes = []
    for i,gpu in enumerate(gpus):
        current_env["RANK"] = str(i)
        cmd = [sys.executable, "-u", script, f"--gpu={gpu}"] + args
        process = subprocess.Popen(cmd, env=current_env)
        processes.append(process)

    for process in processes: process.wait()
[ "def", "main", "(", "gpus", ":", "Param", "(", "\"The GPUs to use for distributed training\"", ",", "str", ")", "=", "'all'", ",", "script", ":", "Param", "(", "\"Script to run\"", ",", "str", ",", "opt", "=", "False", ")", "=", "''", ",", "args", ":", "Param", "(", "\"Args to pass to script\"", ",", "nargs", "=", "'...'", ",", "opt", "=", "False", ")", "=", "''", ")", ":", "# Loosely based on torch.distributed.launch", "current_env", "=", "os", ".", "environ", ".", "copy", "(", ")", "gpus", "=", "list", "(", "range", "(", "torch", ".", "cuda", ".", "device_count", "(", ")", ")", ")", "if", "gpus", "==", "'all'", "else", "list", "(", "gpus", ")", "current_env", "[", "\"WORLD_SIZE\"", "]", "=", "str", "(", "len", "(", "gpus", ")", ")", "current_env", "[", "\"MASTER_ADDR\"", "]", "=", "'127.0.0.1'", "current_env", "[", "\"MASTER_PORT\"", "]", "=", "'29500'", "processes", "=", "[", "]", "for", "i", ",", "gpu", "in", "enumerate", "(", "gpus", ")", ":", "current_env", "[", "\"RANK\"", "]", "=", "str", "(", "i", ")", "cmd", "=", "[", "sys", ".", "executable", ",", "\"-u\"", ",", "script", ",", "f\"--gpu={gpu}\"", "]", "+", "args", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "env", "=", "current_env", ")", "processes", ".", "append", "(", "process", ")", "for", "process", "in", "processes", ":", "process", ".", "wait", "(", ")" ]
PyTorch distributed training launch helper that spawns multiple distributed processes
[ "PyTorch", "distributed", "training", "launch", "helper", "that", "spawns", "multiple", "distributed", "processes" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/launch.py#L5-L25
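Each launched copy of the script sees the env vars set above plus a `--gpu` flag; a sketch of the receiving side (the function name is illustrative):

import torch
import torch.distributed as dist

def setup_distributed(gpu:int):
    torch.cuda.set_device(gpu)
    # init_method='env://' reads MASTER_ADDR/MASTER_PORT/RANK/WORLD_SIZE
    dist.init_process_group(backend='nccl', init_method='env://')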
20,839
fastai/fastai
fastai/callbacks/loss_metrics.py
LossMetrics.on_train_begin
def on_train_begin(self, **kwargs):
        "Add the metrics names to the `Recorder`."
        self.names = ifnone(self.learn.loss_func.metric_names, [])
        if not self.names: warn('LossMetrics requested but no loss_func.metric_names provided')
        self.learn.recorder.add_metric_names(self.names)
python
def on_train_begin(self, **kwargs):
        "Add the metrics names to the `Recorder`."
        self.names = ifnone(self.learn.loss_func.metric_names, [])
        if not self.names: warn('LossMetrics requested but no loss_func.metric_names provided')
        self.learn.recorder.add_metric_names(self.names)
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "names", "=", "ifnone", "(", "self", ".", "learn", ".", "loss_func", ".", "metric_names", ",", "[", "]", ")", "if", "not", "self", ".", "names", ":", "warn", "(", "'LossMetrics requested but no loss_func.metric_names provided'", ")", "self", ".", "learn", ".", "recorder", ".", "add_metric_names", "(", "self", ".", "names", ")" ]
Add the metrics names to the `Recorder`.
[ "Add", "the", "metrics", "names", "to", "the", "Recorder", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/loss_metrics.py#L11-L15
20,840
fastai/fastai
fastai/callbacks/loss_metrics.py
LossMetrics.on_epoch_begin
def on_epoch_begin(self, **kwargs):
        "Initialize the metrics for this epoch."
        self.metrics = {name:0. for name in self.names}
        self.nums = 0
python
def on_epoch_begin(self, **kwargs):
        "Initialize the metrics for this epoch."
        self.metrics = {name:0. for name in self.names}
        self.nums = 0
[ "def", "on_epoch_begin", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "metrics", "=", "{", "name", ":", "0.", "for", "name", "in", "self", ".", "names", "}", "self", ".", "nums", "=", "0" ]
Initialize the metrics for this epoch.
[ "Initialize", "the", "metrics", "for", "this", "epoch", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/loss_metrics.py#L17-L20
20,841
fastai/fastai
fastai/callbacks/loss_metrics.py
LossMetrics.on_batch_end
def on_batch_end(self, last_target, train, **kwargs):
        "Update the metrics if not `train`"
        if train: return
        bs = last_target.size(0)
        for name in self.names:
            self.metrics[name] += bs * self.learn.loss_func.metrics[name].detach().cpu()
        self.nums += bs
python
def on_batch_end(self, last_target, train, **kwargs):
        "Update the metrics if not `train`"
        if train: return
        bs = last_target.size(0)
        for name in self.names:
            self.metrics[name] += bs * self.learn.loss_func.metrics[name].detach().cpu()
        self.nums += bs
[ "def", "on_batch_end", "(", "self", ",", "last_target", ",", "train", ",", "*", "*", "kwargs", ")", ":", "if", "train", ":", "return", "bs", "=", "last_target", ".", "size", "(", "0", ")", "for", "name", "in", "self", ".", "names", ":", "self", ".", "metrics", "[", "name", "]", "+=", "bs", "*", "self", ".", "learn", ".", "loss_func", ".", "metrics", "[", "name", "]", ".", "detach", "(", ")", ".", "cpu", "(", ")", "self", ".", "nums", "+=", "bs" ]
Update the metrics if not `train`
[ "Update", "the", "metrics", "if", "not", "train" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/loss_metrics.py#L22-L28
20,842
fastai/fastai
fastai/callbacks/loss_metrics.py
LossMetrics.on_epoch_end
def on_epoch_end(self, last_metrics, **kwargs):
        "Finish the computation and send the result to the Recorder."
        if not self.nums: return
        metrics = [self.metrics[name]/self.nums for name in self.names]
        return {'last_metrics': last_metrics+metrics}
python
def on_epoch_end(self, last_metrics, **kwargs):
        "Finish the computation and send the result to the Recorder."
        if not self.nums: return
        metrics = [self.metrics[name]/self.nums for name in self.names]
        return {'last_metrics': last_metrics+metrics}
[ "def", "on_epoch_end", "(", "self", ",", "last_metrics", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "nums", ":", "return", "metrics", "=", "[", "self", ".", "metrics", "[", "name", "]", "/", "self", ".", "nums", "for", "name", "in", "self", ".", "names", "]", "return", "{", "'last_metrics'", ":", "last_metrics", "+", "metrics", "}" ]
Finish the computation and send the result to the Recorder.
[ "Finish", "the", "computation", "and", "sends", "the", "result", "to", "the", "Recorder", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/loss_metrics.py#L30-L34
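All four LossMetrics callbacks above assume a loss function that publishes `metric_names` and refreshes a `metrics` dict each forward pass; a sketch of such a loss (the class and the 0.5 weight are illustrative):

import torch.nn as nn
import torch.nn.functional as F

class TwoPartLoss(nn.Module):
    metric_names = ['mse', 'l1']
    def forward(self, pred, targ):
        mse, l1 = F.mse_loss(pred, targ), F.l1_loss(pred, targ)
        self.metrics = dict(mse=mse, l1=l1)   # read by LossMetrics.on_batch_end
        return mse + 0.5 * l1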
20,843
fastai/fastai
fastai/vision/cyclegan.py
CycleGANTrainer.on_train_begin
def on_train_begin(self, **kwargs):
        "Create the various optimizers."
        self.G_A,self.G_B = self.learn.model.G_A,self.learn.model.G_B
        self.D_A,self.D_B = self.learn.model.D_A,self.learn.model.D_B
        self.crit = self.learn.loss_func.crit
        self.opt_G = self.learn.opt.new([nn.Sequential(*flatten_model(self.G_A), *flatten_model(self.G_B))])
        self.opt_D_A = self.learn.opt.new([nn.Sequential(*flatten_model(self.D_A))])
        self.opt_D_B = self.learn.opt.new([nn.Sequential(*flatten_model(self.D_B))])
        self.learn.opt.opt = self.opt_G.opt
        self._set_trainable()
        self.names = ['idt_loss', 'gen_loss', 'cyc_loss', 'da_loss', 'db_loss']
        self.learn.recorder.no_val=True
        self.learn.recorder.add_metric_names(self.names)
        self.smootheners = {n:SmoothenValue(0.98) for n in self.names}
python
def on_train_begin(self, **kwargs):
        "Create the various optimizers."
        self.G_A,self.G_B = self.learn.model.G_A,self.learn.model.G_B
        self.D_A,self.D_B = self.learn.model.D_A,self.learn.model.D_B
        self.crit = self.learn.loss_func.crit
        self.opt_G = self.learn.opt.new([nn.Sequential(*flatten_model(self.G_A), *flatten_model(self.G_B))])
        self.opt_D_A = self.learn.opt.new([nn.Sequential(*flatten_model(self.D_A))])
        self.opt_D_B = self.learn.opt.new([nn.Sequential(*flatten_model(self.D_B))])
        self.learn.opt.opt = self.opt_G.opt
        self._set_trainable()
        self.names = ['idt_loss', 'gen_loss', 'cyc_loss', 'da_loss', 'db_loss']
        self.learn.recorder.no_val=True
        self.learn.recorder.add_metric_names(self.names)
        self.smootheners = {n:SmoothenValue(0.98) for n in self.names}
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "G_A", ",", "self", ".", "G_B", "=", "self", ".", "learn", ".", "model", ".", "G_A", ",", "self", ".", "learn", ".", "model", ".", "G_B", "self", ".", "D_A", ",", "self", ".", "D_B", "=", "self", ".", "learn", ".", "model", ".", "D_A", ",", "self", ".", "learn", ".", "model", ".", "D_B", "self", ".", "crit", "=", "self", ".", "learn", ".", "loss_func", ".", "crit", "self", ".", "opt_G", "=", "self", ".", "learn", ".", "opt", ".", "new", "(", "[", "nn", ".", "Sequential", "(", "*", "flatten_model", "(", "self", ".", "G_A", ")", ",", "*", "flatten_model", "(", "self", ".", "G_B", ")", ")", "]", ")", "self", ".", "opt_D_A", "=", "self", ".", "learn", ".", "opt", ".", "new", "(", "[", "nn", ".", "Sequential", "(", "*", "flatten_model", "(", "self", ".", "D_A", ")", ")", "]", ")", "self", ".", "opt_D_B", "=", "self", ".", "learn", ".", "opt", ".", "new", "(", "[", "nn", ".", "Sequential", "(", "*", "flatten_model", "(", "self", ".", "D_B", ")", ")", "]", ")", "self", ".", "learn", ".", "opt", ".", "opt", "=", "self", ".", "opt_G", ".", "opt", "self", ".", "_set_trainable", "(", ")", "self", ".", "names", "=", "[", "'idt_loss'", ",", "'gen_loss'", ",", "'cyc_loss'", ",", "'da_loss'", ",", "'db_loss'", "]", "self", ".", "learn", ".", "recorder", ".", "no_val", "=", "True", "self", ".", "learn", ".", "recorder", ".", "add_metric_names", "(", "self", ".", "names", ")", "self", ".", "smootheners", "=", "{", "n", ":", "SmoothenValue", "(", "0.98", ")", "for", "n", "in", "self", ".", "names", "}" ]
Create the various optimizers.
[ "Create", "the", "various", "optimizers", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/cyclegan.py#L145-L158
20,844
fastai/fastai
fastai/vision/cyclegan.py
CycleGANTrainer.on_batch_end
def on_batch_end(self, last_input, last_output, **kwargs):
        "Steps through the generators then each of the critics."
        self.G_A.zero_grad(); self.G_B.zero_grad()
        fake_A, fake_B = last_output[0].detach(), last_output[1].detach()
        real_A, real_B = last_input
        self._set_trainable(D_A=True)
        self.D_A.zero_grad()
        loss_D_A = 0.5 * (self.crit(self.D_A(real_A), True) + self.crit(self.D_A(fake_A), False))
        loss_D_A.backward()
        self.opt_D_A.step()
        self._set_trainable(D_B=True)
        self.D_B.zero_grad()
        loss_D_B = 0.5 * (self.crit(self.D_B(real_B), True) + self.crit(self.D_B(fake_B), False))
        loss_D_B.backward()
        self.opt_D_B.step()
        self._set_trainable()
        metrics = self.learn.loss_func.metrics + [loss_D_A, loss_D_B]
        for n,m in zip(self.names,metrics): self.smootheners[n].add_value(m)
python
def on_batch_end(self, last_input, last_output, **kwargs):
        "Steps through the generators then each of the critics."
        self.G_A.zero_grad(); self.G_B.zero_grad()
        fake_A, fake_B = last_output[0].detach(), last_output[1].detach()
        real_A, real_B = last_input
        self._set_trainable(D_A=True)
        self.D_A.zero_grad()
        loss_D_A = 0.5 * (self.crit(self.D_A(real_A), True) + self.crit(self.D_A(fake_A), False))
        loss_D_A.backward()
        self.opt_D_A.step()
        self._set_trainable(D_B=True)
        self.D_B.zero_grad()
        loss_D_B = 0.5 * (self.crit(self.D_B(real_B), True) + self.crit(self.D_B(fake_B), False))
        loss_D_B.backward()
        self.opt_D_B.step()
        self._set_trainable()
        metrics = self.learn.loss_func.metrics + [loss_D_A, loss_D_B]
        for n,m in zip(self.names,metrics): self.smootheners[n].add_value(m)
[ "def", "on_batch_end", "(", "self", ",", "last_input", ",", "last_output", ",", "*", "*", "kwargs", ")", ":", "self", ".", "G_A", ".", "zero_grad", "(", ")", "self", ".", "G_B", ".", "zero_grad", "(", ")", "fake_A", ",", "fake_B", "=", "last_output", "[", "0", "]", ".", "detach", "(", ")", ",", "last_output", "[", "1", "]", ".", "detach", "(", ")", "real_A", ",", "real_B", "=", "last_input", "self", ".", "_set_trainable", "(", "D_A", "=", "True", ")", "self", ".", "D_A", ".", "zero_grad", "(", ")", "loss_D_A", "=", "0.5", "*", "(", "self", ".", "crit", "(", "self", ".", "D_A", "(", "real_A", ")", ",", "True", ")", "+", "self", ".", "crit", "(", "self", ".", "D_A", "(", "fake_A", ")", ",", "False", ")", ")", "loss_D_A", ".", "backward", "(", ")", "self", ".", "opt_D_A", ".", "step", "(", ")", "self", ".", "_set_trainable", "(", "D_B", "=", "True", ")", "self", ".", "D_B", ".", "zero_grad", "(", ")", "loss_D_B", "=", "0.5", "*", "(", "self", ".", "crit", "(", "self", ".", "D_B", "(", "real_B", ")", ",", "True", ")", "+", "self", ".", "crit", "(", "self", ".", "D_B", "(", "fake_B", ")", ",", "False", ")", ")", "loss_D_B", ".", "backward", "(", ")", "self", ".", "opt_D_B", ".", "step", "(", ")", "self", ".", "_set_trainable", "(", ")", "metrics", "=", "self", ".", "learn", ".", "loss_func", ".", "metrics", "+", "[", "loss_D_A", ",", "loss_D_B", "]", "for", "n", ",", "m", "in", "zip", "(", "self", ".", "names", ",", "metrics", ")", ":", "self", ".", "smootheners", "[", "n", "]", ".", "add_value", "(", "m", ")" ]
Steps through the generators then each of the critics.
[ "Steps", "through", "the", "generators", "then", "each", "of", "the", "critics", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/cyclegan.py#L164-L181
20,845
fastai/fastai
fastai/vision/cyclegan.py
CycleGANTrainer.on_epoch_end
def on_epoch_end(self, last_metrics, **kwargs):
        "Put the various losses in the recorder."
        return add_metrics(last_metrics, [s.smooth for k,s in self.smootheners.items()])
python
def on_epoch_end(self, last_metrics, **kwargs):
        "Put the various losses in the recorder."
        return add_metrics(last_metrics, [s.smooth for k,s in self.smootheners.items()])
[ "def", "on_epoch_end", "(", "self", ",", "last_metrics", ",", "*", "*", "kwargs", ")", ":", "return", "add_metrics", "(", "last_metrics", ",", "[", "s", ".", "smooth", "for", "k", ",", "s", "in", "self", ".", "smootheners", ".", "items", "(", ")", "]", ")" ]
Put the various losses in the recorder.
[ "Put", "the", "various", "losses", "in", "the", "recorder", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/cyclegan.py#L183-L185
20,846
fastai/fastai
fastai/callbacks/csv_logger.py
CSVLogger.on_train_begin
def on_train_begin(self, **kwargs: Any) -> None:
        "Prepare file with metric names."
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.file = self.path.open('a') if self.append else self.path.open('w')
        self.file.write(','.join(self.learn.recorder.names[:(None if self.add_time else -1)]) + '\n')
python
def on_train_begin(self, **kwargs: Any) -> None:
        "Prepare file with metric names."
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.file = self.path.open('a') if self.append else self.path.open('w')
        self.file.write(','.join(self.learn.recorder.names[:(None if self.add_time else -1)]) + '\n')
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "path", ".", "parent", ".", "mkdir", "(", "parents", "=", "True", ",", "exist_ok", "=", "True", ")", "self", ".", "file", "=", "self", ".", "path", ".", "open", "(", "'a'", ")", "if", "self", ".", "append", "else", "self", ".", "path", ".", "open", "(", "'w'", ")", "self", ".", "file", ".", "write", "(", "','", ".", "join", "(", "self", ".", "learn", ".", "recorder", ".", "names", "[", ":", "(", "None", "if", "self", ".", "add_time", "else", "-", "1", ")", "]", ")", "+", "'\\n'", ")" ]
Prepare file with metric names.
[ "Prepare", "file", "with", "metric", "names", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/csv_logger.py#L23-L27
20,847
fastai/fastai
fastai/callbacks/csv_logger.py
CSVLogger.on_epoch_end
def on_epoch_end(self, epoch: int, smooth_loss: Tensor, last_metrics: MetricsList, **kwargs: Any) -> bool:
        "Add a line with `epoch` number, `smooth_loss` and `last_metrics`."
        last_metrics = ifnone(last_metrics, [])
        stats = [str(stat) if isinstance(stat, int) else '#na#' if stat is None else f'{stat:.6f}'
                 for name, stat in zip(self.learn.recorder.names, [epoch, smooth_loss] + last_metrics)]
        if self.add_time: stats.append(format_time(time() - self.start_epoch))
        str_stats = ','.join(stats)
        self.file.write(str_stats + '\n')
python
def on_epoch_end(self, epoch: int, smooth_loss: Tensor, last_metrics: MetricsList, **kwargs: Any) -> bool:
        "Add a line with `epoch` number, `smooth_loss` and `last_metrics`."
        last_metrics = ifnone(last_metrics, [])
        stats = [str(stat) if isinstance(stat, int) else '#na#' if stat is None else f'{stat:.6f}'
                 for name, stat in zip(self.learn.recorder.names, [epoch, smooth_loss] + last_metrics)]
        if self.add_time: stats.append(format_time(time() - self.start_epoch))
        str_stats = ','.join(stats)
        self.file.write(str_stats + '\n')
[ "def", "on_epoch_end", "(", "self", ",", "epoch", ":", "int", ",", "smooth_loss", ":", "Tensor", ",", "last_metrics", ":", "MetricsList", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "bool", ":", "last_metrics", "=", "ifnone", "(", "last_metrics", ",", "[", "]", ")", "stats", "=", "[", "str", "(", "stat", ")", "if", "isinstance", "(", "stat", ",", "int", ")", "else", "'#na#'", "if", "stat", "is", "None", "else", "f'{stat:.6f}'", "for", "name", ",", "stat", "in", "zip", "(", "self", ".", "learn", ".", "recorder", ".", "names", ",", "[", "epoch", ",", "smooth_loss", "]", "+", "last_metrics", ")", "]", "if", "self", ".", "add_time", ":", "stats", ".", "append", "(", "format_time", "(", "time", "(", ")", "-", "self", ".", "start_epoch", ")", ")", "str_stats", "=", "','", ".", "join", "(", "stats", ")", "self", ".", "file", ".", "write", "(", "str_stats", "+", "'\\n'", ")" ]
Add a line with `epoch` number, `smooth_loss` and `last_metrics`.
[ "Add", "a", "line", "with", "epoch", "number", "smooth_loss", "and", "last_metrics", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/csv_logger.py#L32-L39
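A usage sketch for the two CSVLogger records; `data` and `model` are assumed in scope, and `read_logged_file` is assumed to be the callback's own helper for loading the CSV back as a DataFrame:

from functools import partial
from fastai.callbacks import CSVLogger

learn = Learner(data, model, callback_fns=[partial(CSVLogger, filename='history')])
learn.fit(3)
df = learn.csv_logger.read_logged_file()   # epoch, train_loss, valid_loss, ...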
20,848
fastai/fastai
fastai/callbacks/fp16.py
get_master
def get_master(layer_groups:ModuleList, flat_master:bool=False) -> Tuple[List[List[Tensor]], List[List[Tensor]]]:
    "Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32."
    split_params = split_no_wd_params(layer_groups)
    model_params = [[param for param in pg if param.requires_grad] for pg in split_params]
    if flat_master:
        master_params = []
        for lg in model_params:
            if len(lg) !=0 :
                mp = parameters_to_vector([param.data.float() for param in lg])
                mp = torch.nn.Parameter(mp, requires_grad=True)
                if mp.grad is None: mp.grad = mp.new(*mp.size())
                master_params.append([mp])
            else: master_params.append([])
        return model_params, master_params
    else:
        master_params = [[param.clone().float().detach() for param in lg] for lg in model_params]
        for mp in master_params:
            for param in mp: param.requires_grad = True
        return model_params, master_params
python
def get_master(layer_groups:ModuleList, flat_master:bool=False) -> Tuple[List[List[Tensor]], List[List[Tensor]]]:
    "Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32."
    split_params = split_no_wd_params(layer_groups)
    model_params = [[param for param in pg if param.requires_grad] for pg in split_params]
    if flat_master:
        master_params = []
        for lg in model_params:
            if len(lg) !=0 :
                mp = parameters_to_vector([param.data.float() for param in lg])
                mp = torch.nn.Parameter(mp, requires_grad=True)
                if mp.grad is None: mp.grad = mp.new(*mp.size())
                master_params.append([mp])
            else: master_params.append([])
        return model_params, master_params
    else:
        master_params = [[param.clone().float().detach() for param in lg] for lg in model_params]
        for mp in master_params:
            for param in mp: param.requires_grad = True
        return model_params, master_params
[ "def", "get_master", "(", "layer_groups", ":", "ModuleList", ",", "flat_master", ":", "bool", "=", "False", ")", "->", "Tuple", "[", "List", "[", "List", "[", "Tensor", "]", "]", ",", "List", "[", "List", "[", "Tensor", "]", "]", "]", ":", "split_params", "=", "split_no_wd_params", "(", "layer_groups", ")", "model_params", "=", "[", "[", "param", "for", "param", "in", "pg", "if", "param", ".", "requires_grad", "]", "for", "pg", "in", "split_params", "]", "if", "flat_master", ":", "master_params", "=", "[", "]", "for", "lg", "in", "model_params", ":", "if", "len", "(", "lg", ")", "!=", "0", ":", "mp", "=", "parameters_to_vector", "(", "[", "param", ".", "data", ".", "float", "(", ")", "for", "param", "in", "lg", "]", ")", "mp", "=", "torch", ".", "nn", ".", "Parameter", "(", "mp", ",", "requires_grad", "=", "True", ")", "if", "mp", ".", "grad", "is", "None", ":", "mp", ".", "grad", "=", "mp", ".", "new", "(", "*", "mp", ".", "size", "(", ")", ")", "master_params", ".", "append", "(", "[", "mp", "]", ")", "else", ":", "master_params", ".", "append", "(", "[", "]", ")", "return", "model_params", ",", "master_params", "else", ":", "master_params", "=", "[", "[", "param", ".", "clone", "(", ")", ".", "float", "(", ")", ".", "detach", "(", ")", "for", "param", "in", "lg", "]", "for", "lg", "in", "model_params", "]", "for", "mp", "in", "master_params", ":", "for", "param", "in", "mp", ":", "param", ".", "requires_grad", "=", "True", "return", "model_params", ",", "master_params" ]
Return two lists, one for the model parameters in FP16 and one for the master parameters in FP32.
[ "Return", "two", "lists", "one", "for", "the", "model", "parameters", "in", "FP16", "and", "one", "for", "the", "master", "parameters", "in", "FP32", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L10-L28
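The point of the two lists is one weight, two precisions; a sketch (assumes the model was already converted with `.half()` and `learn` is in scope):

model_p, master_p = get_master(learn.layer_groups, flat_master=False)
print(model_p[0][0].dtype)    # torch.float16 -- lives in the network
print(master_p[0][0].dtype)   # torch.float32 -- the optimizer steps on this copy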
20,849
fastai/fastai
fastai/callbacks/fp16.py
model_g2master_g
def model_g2master_g(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None:
    "Copy the `model_params` gradients to `master_params` for the optimizer step."
    if flat_master:
        for model_group,master_group in zip(model_params,master_params):
            if len(master_group) != 0:
                if master_group[0].grad is None: master_group[0].grad = master_group[0].data.new(*master_group[0].data.size())
                master_group[0].grad.data.copy_(parameters_to_vector([p.grad.data.float() for p in model_group]))
    else:
        for model_group,master_group in zip(model_params,master_params):
            for model, master in zip(model_group, master_group):
                if model.grad is not None:
                    if master.grad is None: master.grad = master.data.new(*master.data.size())
                    master.grad.data.copy_(model.grad.data)
                else: master.grad = None
python
def model_g2master_g(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None:
    "Copy the `model_params` gradients to `master_params` for the optimizer step."
    if flat_master:
        for model_group,master_group in zip(model_params,master_params):
            if len(master_group) != 0:
                if master_group[0].grad is None: master_group[0].grad = master_group[0].data.new(*master_group[0].data.size())
                master_group[0].grad.data.copy_(parameters_to_vector([p.grad.data.float() for p in model_group]))
    else:
        for model_group,master_group in zip(model_params,master_params):
            for model, master in zip(model_group, master_group):
                if model.grad is not None:
                    if master.grad is None: master.grad = master.data.new(*master.data.size())
                    master.grad.data.copy_(model.grad.data)
                else: master.grad = None
[ "def", "model_g2master_g", "(", "model_params", ":", "Sequence", "[", "Tensor", "]", ",", "master_params", ":", "Sequence", "[", "Tensor", "]", ",", "flat_master", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "flat_master", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "if", "len", "(", "master_group", ")", "!=", "0", ":", "if", "master_group", "[", "0", "]", ".", "grad", "is", "None", ":", "master_group", "[", "0", "]", ".", "grad", "=", "master_group", "[", "0", "]", ".", "data", ".", "new", "(", "*", "master_group", "[", "0", "]", ".", "data", ".", "size", "(", ")", ")", "master_group", "[", "0", "]", ".", "grad", ".", "data", ".", "copy_", "(", "parameters_to_vector", "(", "[", "p", ".", "grad", ".", "data", ".", "float", "(", ")", "for", "p", "in", "model_group", "]", ")", ")", "else", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "for", "model", ",", "master", "in", "zip", "(", "model_group", ",", "master_group", ")", ":", "if", "model", ".", "grad", "is", "not", "None", ":", "if", "master", ".", "grad", "is", "None", ":", "master", ".", "grad", "=", "master", ".", "data", ".", "new", "(", "*", "master", ".", "data", ".", "size", "(", ")", ")", "master", ".", "grad", ".", "data", ".", "copy_", "(", "model", ".", "grad", ".", "data", ")", "else", ":", "master", ".", "grad", "=", "None" ]
Copy the `model_params` gradients to `master_params` for the optimizer step.
[ "Copy", "the", "model_params", "gradients", "to", "master_params", "for", "the", "optimizer", "step", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L30-L43
20,850
fastai/fastai
fastai/callbacks/fp16.py
master2model
def master2model(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None:
    "Copy `master_params` to `model_params`."
    if flat_master:
        for model_group,master_group in zip(model_params,master_params):
            if len(model_group) != 0:
                for model, master in zip(model_group, _unflatten_dense_tensors(master_group[0].data, model_group)):
                    model.data.copy_(master)
    else:
        for model_group,master_group in zip(model_params,master_params):
            for model, master in zip(model_group, master_group): model.data.copy_(master.data)
python
def master2model(model_params:Sequence[Tensor], master_params:Sequence[Tensor], flat_master:bool=False)->None:
    "Copy `master_params` to `model_params`."
    if flat_master:
        for model_group,master_group in zip(model_params,master_params):
            if len(model_group) != 0:
                for model, master in zip(model_group, _unflatten_dense_tensors(master_group[0].data, model_group)):
                    model.data.copy_(master)
    else:
        for model_group,master_group in zip(model_params,master_params):
            for model, master in zip(model_group, master_group): model.data.copy_(master.data)
[ "def", "master2model", "(", "model_params", ":", "Sequence", "[", "Tensor", "]", ",", "master_params", ":", "Sequence", "[", "Tensor", "]", ",", "flat_master", ":", "bool", "=", "False", ")", "->", "None", ":", "if", "flat_master", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "if", "len", "(", "model_group", ")", "!=", "0", ":", "for", "model", ",", "master", "in", "zip", "(", "model_group", ",", "_unflatten_dense_tensors", "(", "master_group", "[", "0", "]", ".", "data", ",", "model_group", ")", ")", ":", "model", ".", "data", ".", "copy_", "(", "master", ")", "else", ":", "for", "model_group", ",", "master_group", "in", "zip", "(", "model_params", ",", "master_params", ")", ":", "for", "model", ",", "master", "in", "zip", "(", "model_group", ",", "master_group", ")", ":", "model", ".", "data", ".", "copy_", "(", "master", ".", "data", ")" ]
Copy `master_params` to `model_params`.
[ "Copy", "master_params", "to", "model_params", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L45-L54
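The companion sketch for the opposite direction, copying the optimizer-updated FP32 master weights back onto the FP16 model (same assumed setup as above):
    opt.step()                                                    # step on FP32 masters
    master2model(model_params, master_params, flat_master=False)  # sync the FP16 copy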
20,851
fastai/fastai
fastai/callbacks/fp16.py
MixedPrecision.on_train_begin
def on_train_begin(self, **kwargs:Any)->None:
    "Prepare the master model."
    #Get a copy of the model params in FP32
    self.model_params, self.master_params = get_master(self.learn.layer_groups, self.flat_master)
    #Changes the optimizer so that the optimization step is done in FP32.
    new_opt = self.learn.opt.new_with_params(self.master_params)
    if self.opt is not None:
        self.opt.lr,self.opt.wd = self.learn.opt.lr,self.learn.opt.wd
        new_opt.load_state_dict(self.opt)
    self.learn.opt.opt = new_opt.opt
    self.noskip = 0
python
def on_train_begin(self, **kwargs:Any)->None:
    "Prepare the master model."
    #Get a copy of the model params in FP32
    self.model_params, self.master_params = get_master(self.learn.layer_groups, self.flat_master)
    #Changes the optimizer so that the optimization step is done in FP32.
    new_opt = self.learn.opt.new_with_params(self.master_params)
    if self.opt is not None:
        self.opt.lr,self.opt.wd = self.learn.opt.lr,self.learn.opt.wd
        new_opt.load_state_dict(self.opt)
    self.learn.opt.opt = new_opt.opt
    self.noskip = 0
[ "def", "on_train_begin", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "#Get a copy of the model params in FP32", "self", ".", "model_params", ",", "self", ".", "master_params", "=", "get_master", "(", "self", ".", "learn", ".", "layer_groups", ",", "self", ".", "flat_master", ")", "#Changes the optimizer so that the optimization step is done in FP32.", "new_opt", "=", "self", ".", "learn", ".", "opt", ".", "new_with_params", "(", "self", ".", "master_params", ")", "if", "self", ".", "opt", "is", "not", "None", ":", "self", ".", "opt", ".", "lr", ",", "self", ".", "opt", ".", "wd", "=", "self", ".", "learn", ".", "opt", ".", "lr", ",", "self", ".", "learn", ".", "opt", ".", "wd", "new_opt", ".", "load_state_dict", "(", "self", ".", "opt", ")", "self", ".", "learn", ".", "opt", ".", "opt", "=", "new_opt", ".", "opt", "self", ".", "noskip", "=", "0" ]
Prepare the master model.
[ "Prepare", "the", "master", "model", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L76-L86
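This hook is not called by hand; a sketch of how it is normally triggered in fastai v1 (dataset and architecture assumed):
    learn = cnn_learner(data, models.resnet34).to_fp16()  # attaches MixedPrecision
    learn.fit_one_cycle(1)  # on_train_begin builds the FP32 master params here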
20,852
fastai/fastai
fastai/callbacks/fp16.py
MixedPrecision.on_backward_begin
def on_backward_begin(self, last_loss:Rank0Tensor, **kwargs:Any) -> Rank0Tensor:
    "Scale gradients up by `self.loss_scale` to prevent underflow."
    #To avoid gradient underflow, we scale the gradients
    ret_loss = last_loss * self.loss_scale
    return {'last_loss': ret_loss}
python
def on_backward_begin(self, last_loss:Rank0Tensor, **kwargs:Any) -> Rank0Tensor:
    "Scale gradients up by `self.loss_scale` to prevent underflow."
    #To avoid gradient underflow, we scale the gradients
    ret_loss = last_loss * self.loss_scale
    return {'last_loss': ret_loss}
[ "def", "on_backward_begin", "(", "self", ",", "last_loss", ":", "Rank0Tensor", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Rank0Tensor", ":", "#To avoid gradient underflow, we scale the gradients", "ret_loss", "=", "last_loss", "*", "self", ".", "loss_scale", "return", "{", "'last_loss'", ":", "ret_loss", "}" ]
Scale gradients up by `self.loss_scale` to prevent underflow.
[ "Scale", "gradients", "up", "by", "self", ".", "loss_scale", "to", "prevent", "underflow", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L92-L96
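The hook reduces to a single multiply before backprop; a bare-PyTorch sketch with an assumed scale:
    loss_scale = 512.0
    (loss * loss_scale).backward()   # gradients come out scaled up by 512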
20,853
fastai/fastai
fastai/callbacks/fp16.py
MixedPrecision.on_backward_end
def on_backward_end(self, **kwargs:Any)->None:
    "Convert the gradients back to FP32 and divide them by the scale."
    if self.dynamic and grad_overflow(self.model_params) and self.loss_scale > 1:
        self.loss_scale /= 2
        self.noskip = 0
        #The step will be skipped since we don't update the master grads so they are all None or zero
    else:
        model_g2master_g(self.model_params, self.master_params, self.flat_master)
        for group in self.master_params:
            for param in group:
                if param.grad is not None: param.grad.div_(self.loss_scale)
        if self.clip is not None:
            for group in self.master_params: nn.utils.clip_grad_norm_(group, self.clip)
        if not self.dynamic: return
        self.noskip += 1
        if self.noskip >= self.max_noskip and self.loss_scale < self.max_scale:
            self.loss_scale *= 2
            self.noskip = 0
python
def on_backward_end(self, **kwargs:Any)->None:
    "Convert the gradients back to FP32 and divide them by the scale."
    if self.dynamic and grad_overflow(self.model_params) and self.loss_scale > 1:
        self.loss_scale /= 2
        self.noskip = 0
        #The step will be skipped since we don't update the master grads so they are all None or zero
    else:
        model_g2master_g(self.model_params, self.master_params, self.flat_master)
        for group in self.master_params:
            for param in group:
                if param.grad is not None: param.grad.div_(self.loss_scale)
        if self.clip is not None:
            for group in self.master_params: nn.utils.clip_grad_norm_(group, self.clip)
        if not self.dynamic: return
        self.noskip += 1
        if self.noskip >= self.max_noskip and self.loss_scale < self.max_scale:
            self.loss_scale *= 2
            self.noskip = 0
[ "def", "on_backward_end", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "self", ".", "dynamic", "and", "grad_overflow", "(", "self", ".", "model_params", ")", "and", "self", ".", "loss_scale", ">", "1", ":", "self", ".", "loss_scale", "/=", "2", "self", ".", "noskip", "=", "0", "#The step will be skipped since we don't update the master grads so they are all None or zero", "else", ":", "model_g2master_g", "(", "self", ".", "model_params", ",", "self", ".", "master_params", ",", "self", ".", "flat_master", ")", "for", "group", "in", "self", ".", "master_params", ":", "for", "param", "in", "group", ":", "if", "param", ".", "grad", "is", "not", "None", ":", "param", ".", "grad", ".", "div_", "(", "self", ".", "loss_scale", ")", "if", "self", ".", "clip", "is", "not", "None", ":", "for", "group", "in", "self", ".", "master_params", ":", "nn", ".", "utils", ".", "clip_grad_norm_", "(", "group", ",", "self", ".", "clip", ")", "if", "not", "self", ".", "dynamic", ":", "return", "self", ".", "noskip", "+=", "1", "if", "self", ".", "noskip", ">=", "self", ".", "max_noskip", "and", "self", ".", "loss_scale", "<", "self", ".", "max_scale", ":", "self", ".", "loss_scale", "*=", "2", "self", ".", "noskip", "=", "0" ]
Convert the gradients back to FP32 and divide them by the scale.
[ "Convert", "the", "gradients", "back", "to", "FP32", "and", "divide", "them", "by", "the", "scale", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L98-L115
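A sketch of the dynamic loss-scale bookkeeping in isolation (all names assumed):
    if overflow:                         # inf/nan seen in the FP16 grads
        loss_scale /= 2; noskip = 0      # halve the scale, skip this step
    else:
        noskip += 1
        if noskip >= max_noskip and loss_scale < max_scale:
            loss_scale *= 2; noskip = 0  # grads stable for a while: try a bigger scale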
20,854
fastai/fastai
fastai/callbacks/fp16.py
MixedPrecision.on_step_end
def on_step_end(self, **kwargs:Any)->None:
    "Update the params from master to model and zero grad."
    #Zeros the gradients of the model since the optimizer is disconnected.
    self.learn.model.zero_grad()
    #Update the params from master to model.
    master2model(self.model_params, self.master_params, self.flat_master)
python
def on_step_end(self, **kwargs:Any)->None:
    "Update the params from master to model and zero grad."
    #Zeros the gradients of the model since the optimizer is disconnected.
    self.learn.model.zero_grad()
    #Update the params from master to model.
    master2model(self.model_params, self.master_params, self.flat_master)
[ "def", "on_step_end", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "#Zeros the gradients of the model since the optimizer is disconnected.", "self", ".", "learn", ".", "model", ".", "zero_grad", "(", ")", "#Update the params from master to model.", "master2model", "(", "self", ".", "model_params", ",", "self", ".", "master_params", ",", "self", ".", "flat_master", ")" ]
Update the params from master to model and zero grad.
[ "Update", "the", "params", "from", "master", "to", "model", "and", "zero", "grad", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/fp16.py#L117-L122
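Per optimizer step the hook amounts to two calls (sketch, same assumed params as the records above):
    model.zero_grad()   # the FP16 grads were only a staging area
    master2model(model_params, master_params, flat_master=False)  # FP32 -> FP16 weights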
20,855
fastai/fastai
old/fastai/transforms.py
dihedral
def dihedral(x, dih):
    """ Perform any of 8 permutations of 90-degree rotations or flips for image x. """
    x = np.rot90(x, dih%4)
    return x if dih<4 else np.fliplr(x)
python
def dihedral(x, dih):
    """ Perform any of 8 permutations of 90-degree rotations or flips for image x. """
    x = np.rot90(x, dih%4)
    return x if dih<4 else np.fliplr(x)
[ "def", "dihedral", "(", "x", ",", "dih", ")", ":", "x", "=", "np", ".", "rot90", "(", "x", ",", "dih", "%", "4", ")", "return", "x", "if", "dih", "<", "4", "else", "np", ".", "fliplr", "(", "x", ")" ]
Perform any of 8 permutations of 90-degree rotations or flips for image x.
[ "Perform", "any", "of", "8", "permutations", "of", "90", "-", "degree", "rotations", "or", "flips", "for", "image", "x", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L33-L36
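A quick sketch enumerating all eight symmetries of a square image:
    import numpy as np
    im = np.arange(9).reshape(3, 3)
    views = [dihedral(im, k) for k in range(8)]  # 4 rotations + 4 flipped rotations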
20,856
fastai/fastai
old/fastai/transforms.py
lighting
def lighting(im, b, c):
    """ Adjust image balance and contrast """
    if b==0 and c==1: return im
    mu = np.average(im)
    return np.clip((im-mu)*c+mu+b,0.,1.).astype(np.float32)
python
def lighting(im, b, c):
    """ Adjust image balance and contrast """
    if b==0 and c==1: return im
    mu = np.average(im)
    return np.clip((im-mu)*c+mu+b,0.,1.).astype(np.float32)
[ "def", "lighting", "(", "im", ",", "b", ",", "c", ")", ":", "if", "b", "==", "0", "and", "c", "==", "1", ":", "return", "im", "mu", "=", "np", ".", "average", "(", "im", ")", "return", "np", ".", "clip", "(", "(", "im", "-", "mu", ")", "*", "c", "+", "mu", "+", "b", ",", "0.", ",", "1.", ")", ".", "astype", "(", "np", ".", "float32", ")" ]
Adjust image balance and contrast
[ "Adjust", "image", "balance", "and", "contrast" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L38-L42
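A sketch on a float image assumed to lie in [0, 1]:
    brighter = lighting(im, b=0.2, c=1.0)  # shift balance up by 0.2
    punchier = lighting(im, b=0.0, c=1.5)  # stretch contrast around the mean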
20,857
fastai/fastai
old/fastai/transforms.py
scale_to
def scale_to(x, ratio, targ):
    '''Calculate dimension of an image during scaling with aspect ratio'''
    return max(math.floor(x*ratio), targ)
python
def scale_to(x, ratio, targ):
    '''Calculate dimension of an image during scaling with aspect ratio'''
    return max(math.floor(x*ratio), targ)
[ "def", "scale_to", "(", "x", ",", "ratio", ",", "targ", ")", ":", "return", "max", "(", "math", ".", "floor", "(", "x", "*", "ratio", ")", ",", "targ", ")" ]
Calculate dimension of an image during scaling with aspect ratio
[ "Calculate", "dimension", "of", "an", "image", "during", "scaling", "with", "aspect", "ratio" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L112-L114
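A worked example, scaling a 480x640 image so its short side becomes 224:
    ratio = 224 / 480
    h = scale_to(480, ratio, 224)  # -> 224
    w = scale_to(640, ratio, 224)  # -> floor(640 * 0.4667) = 298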
20,858
fastai/fastai
old/fastai/transforms.py
to_bb
def to_bb(YY, y="deprecated"): """Convert mask YY to a bounding box, assumes 0 as background nonzero object""" cols,rows = np.nonzero(YY) if len(cols)==0: return np.zeros(4, dtype=np.float32) top_row = np.min(rows) left_col = np.min(cols) bottom_row = np.max(rows) right_col = np.max(cols) return np.array([left_col, top_row, right_col, bottom_row], dtype=np.float32)
python
def to_bb(YY, y="deprecated"): """Convert mask YY to a bounding box, assumes 0 as background nonzero object""" cols,rows = np.nonzero(YY) if len(cols)==0: return np.zeros(4, dtype=np.float32) top_row = np.min(rows) left_col = np.min(cols) bottom_row = np.max(rows) right_col = np.max(cols) return np.array([left_col, top_row, right_col, bottom_row], dtype=np.float32)
[ "def", "to_bb", "(", "YY", ",", "y", "=", "\"deprecated\"", ")", ":", "cols", ",", "rows", "=", "np", ".", "nonzero", "(", "YY", ")", "if", "len", "(", "cols", ")", "==", "0", ":", "return", "np", ".", "zeros", "(", "4", ",", "dtype", "=", "np", ".", "float32", ")", "top_row", "=", "np", ".", "min", "(", "rows", ")", "left_col", "=", "np", ".", "min", "(", "cols", ")", "bottom_row", "=", "np", ".", "max", "(", "rows", ")", "right_col", "=", "np", ".", "max", "(", "cols", ")", "return", "np", ".", "array", "(", "[", "left_col", ",", "top_row", ",", "right_col", ",", "bottom_row", "]", ",", "dtype", "=", "np", ".", "float32", ")" ]
Convert mask YY to a bounding box, assumes 0 as background, nonzero as the object
[ "Convert", "mask", "YY", "to", "a", "bounding", "box", "assumes", "0", "as", "background", "nonzero", "as", "the", "object" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L183-L191
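A sketch on a tiny mask; note `np.nonzero` yields axis-0 indices first, so `cols` in the source actually holds row indices:
    import numpy as np
    mask = np.zeros((5, 5)); mask[1:3, 2:4] = 1
    to_bb(mask)  # -> array([1., 2., 2., 3.], dtype=float32): min/max index per axis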
20,859
fastai/fastai
old/fastai/transforms.py
image_gen
def image_gen(normalizer, denorm, sz, tfms=None, max_zoom=None, pad=0, crop_type=None,
              tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, scale=None):
    """
    Generate a standard set of transformations

    Arguments
    ---------
     normalizer :
         image normalizing function
     denorm :
         image denormalizing function
     sz :
         size, sz_y = sz if not specified.
     tfms :
         iterable collection of transformation functions
     max_zoom : float,
         maximum zoom
     pad : int,
         padding on top, left, right and bottom
     crop_type :
         crop type
     tfm_y :
         y axis specific transformations
     sz_y :
         y size, height
     pad_mode :
         cv2 padding style: repeat, reflect, etc.

    Returns
    -------
     type : ``Transforms``
         transformer for specified image operations.

    See Also
    --------
     Transforms: the transformer object returned by this function
    """
    if tfm_y is None: tfm_y=TfmType.NO
    if tfms is None: tfms=[]
    elif not isinstance(tfms, collections.Iterable): tfms=[tfms]
    if sz_y is None: sz_y = sz
    if scale is None:
        scale = [RandomScale(sz, max_zoom, tfm_y=tfm_y, sz_y=sz_y) if max_zoom is not None
                 else Scale(sz, tfm_y, sz_y=sz_y)]
    elif not is_listy(scale): scale = [scale]
    if pad: scale.append(AddPadding(pad, mode=pad_mode))
    if crop_type!=CropType.GOOGLENET: tfms=scale+tfms
    return Transforms(sz, tfms, normalizer, denorm, crop_type, tfm_y=tfm_y, sz_y=sz_y)
python
def image_gen(normalizer, denorm, sz, tfms=None, max_zoom=None, pad=0, crop_type=None,
              tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, scale=None):
    """
    Generate a standard set of transformations

    Arguments
    ---------
     normalizer :
         image normalizing function
     denorm :
         image denormalizing function
     sz :
         size, sz_y = sz if not specified.
     tfms :
         iterable collection of transformation functions
     max_zoom : float,
         maximum zoom
     pad : int,
         padding on top, left, right and bottom
     crop_type :
         crop type
     tfm_y :
         y axis specific transformations
     sz_y :
         y size, height
     pad_mode :
         cv2 padding style: repeat, reflect, etc.

    Returns
    -------
     type : ``Transforms``
         transformer for specified image operations.

    See Also
    --------
     Transforms: the transformer object returned by this function
    """
    if tfm_y is None: tfm_y=TfmType.NO
    if tfms is None: tfms=[]
    elif not isinstance(tfms, collections.Iterable): tfms=[tfms]
    if sz_y is None: sz_y = sz
    if scale is None:
        scale = [RandomScale(sz, max_zoom, tfm_y=tfm_y, sz_y=sz_y) if max_zoom is not None
                 else Scale(sz, tfm_y, sz_y=sz_y)]
    elif not is_listy(scale): scale = [scale]
    if pad: scale.append(AddPadding(pad, mode=pad_mode))
    if crop_type!=CropType.GOOGLENET: tfms=scale+tfms
    return Transforms(sz, tfms, normalizer, denorm, crop_type, tfm_y=tfm_y, sz_y=sz_y)
[ "def", "image_gen", "(", "normalizer", ",", "denorm", ",", "sz", ",", "tfms", "=", "None", ",", "max_zoom", "=", "None", ",", "pad", "=", "0", ",", "crop_type", "=", "None", ",", "tfm_y", "=", "None", ",", "sz_y", "=", "None", ",", "pad_mode", "=", "cv2", ".", "BORDER_REFLECT", ",", "scale", "=", "None", ")", ":", "if", "tfm_y", "is", "None", ":", "tfm_y", "=", "TfmType", ".", "NO", "if", "tfms", "is", "None", ":", "tfms", "=", "[", "]", "elif", "not", "isinstance", "(", "tfms", ",", "collections", ".", "Iterable", ")", ":", "tfms", "=", "[", "tfms", "]", "if", "sz_y", "is", "None", ":", "sz_y", "=", "sz", "if", "scale", "is", "None", ":", "scale", "=", "[", "RandomScale", "(", "sz", ",", "max_zoom", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ")", "if", "max_zoom", "is", "not", "None", "else", "Scale", "(", "sz", ",", "tfm_y", ",", "sz_y", "=", "sz_y", ")", "]", "elif", "not", "is_listy", "(", "scale", ")", ":", "scale", "=", "[", "scale", "]", "if", "pad", ":", "scale", ".", "append", "(", "AddPadding", "(", "pad", ",", "mode", "=", "pad_mode", ")", ")", "if", "crop_type", "!=", "CropType", ".", "GOOGLENET", ":", "tfms", "=", "scale", "+", "tfms", "return", "Transforms", "(", "sz", ",", "tfms", ",", "normalizer", ",", "denorm", ",", "crop_type", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ")" ]
Generate a standard set of transformations Arguments --------- normalizer : image normalizing function denorm : image denormalizing function sz : size, sz_y = sz if not specified. tfms : iterable collection of transformation functions max_zoom : float, maximum zoom pad : int, padding on top, left, right and bottom crop_type : crop type tfm_y : y axis specific transformations sz_y : y size, height pad_mode : cv2 padding style: repeat, reflect, etc. Returns ------- type : ``Transforms`` transformer for specified image operations. See Also -------- Transforms: the transformer object returned by this function
[ "Generate", "a", "standard", "set", "of", "transformations" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L652-L700
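A direct-call sketch (this is normally reached through `tfms_from_stats`; the stats values are assumed):
    stats = ([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    norm, denorm = Normalize(*stats, tfm_y=TfmType.NO), Denormalize(*stats)
    tfm = image_gen(norm, denorm, sz=224, max_zoom=1.1, crop_type=CropType.RANDOM)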
20,860
fastai/fastai
old/fastai/transforms.py
tfms_from_stats
def tfms_from_stats(stats, sz, aug_tfms=None, max_zoom=None, pad=0, crop_type=CropType.RANDOM,
                    tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, norm_y=True, scale=None):
    """ Given the statistics of the training image sets, returns separate training and validation transform functions """
    if aug_tfms is None: aug_tfms=[]
    tfm_norm = Normalize(*stats, tfm_y=tfm_y if norm_y else TfmType.NO) if stats is not None else None
    tfm_denorm = Denormalize(*stats) if stats is not None else None
    val_crop = CropType.CENTER if crop_type in (CropType.RANDOM,CropType.GOOGLENET) else crop_type
    val_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=val_crop,
                        tfm_y=tfm_y, sz_y=sz_y, scale=scale)
    trn_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=crop_type,
                        tfm_y=tfm_y, sz_y=sz_y, tfms=aug_tfms, max_zoom=max_zoom, pad_mode=pad_mode, scale=scale)
    return trn_tfm, val_tfm
python
def tfms_from_stats(stats, sz, aug_tfms=None, max_zoom=None, pad=0, crop_type=CropType.RANDOM,
                    tfm_y=None, sz_y=None, pad_mode=cv2.BORDER_REFLECT, norm_y=True, scale=None):
    """ Given the statistics of the training image sets, returns separate training and validation transform functions """
    if aug_tfms is None: aug_tfms=[]
    tfm_norm = Normalize(*stats, tfm_y=tfm_y if norm_y else TfmType.NO) if stats is not None else None
    tfm_denorm = Denormalize(*stats) if stats is not None else None
    val_crop = CropType.CENTER if crop_type in (CropType.RANDOM,CropType.GOOGLENET) else crop_type
    val_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=val_crop,
                        tfm_y=tfm_y, sz_y=sz_y, scale=scale)
    trn_tfm = image_gen(tfm_norm, tfm_denorm, sz, pad=pad, crop_type=crop_type,
                        tfm_y=tfm_y, sz_y=sz_y, tfms=aug_tfms, max_zoom=max_zoom, pad_mode=pad_mode, scale=scale)
    return trn_tfm, val_tfm
[ "def", "tfms_from_stats", "(", "stats", ",", "sz", ",", "aug_tfms", "=", "None", ",", "max_zoom", "=", "None", ",", "pad", "=", "0", ",", "crop_type", "=", "CropType", ".", "RANDOM", ",", "tfm_y", "=", "None", ",", "sz_y", "=", "None", ",", "pad_mode", "=", "cv2", ".", "BORDER_REFLECT", ",", "norm_y", "=", "True", ",", "scale", "=", "None", ")", ":", "if", "aug_tfms", "is", "None", ":", "aug_tfms", "=", "[", "]", "tfm_norm", "=", "Normalize", "(", "*", "stats", ",", "tfm_y", "=", "tfm_y", "if", "norm_y", "else", "TfmType", ".", "NO", ")", "if", "stats", "is", "not", "None", "else", "None", "tfm_denorm", "=", "Denormalize", "(", "*", "stats", ")", "if", "stats", "is", "not", "None", "else", "None", "val_crop", "=", "CropType", ".", "CENTER", "if", "crop_type", "in", "(", "CropType", ".", "RANDOM", ",", "CropType", ".", "GOOGLENET", ")", "else", "crop_type", "val_tfm", "=", "image_gen", "(", "tfm_norm", ",", "tfm_denorm", ",", "sz", ",", "pad", "=", "pad", ",", "crop_type", "=", "val_crop", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ",", "scale", "=", "scale", ")", "trn_tfm", "=", "image_gen", "(", "tfm_norm", ",", "tfm_denorm", ",", "sz", ",", "pad", "=", "pad", ",", "crop_type", "=", "crop_type", ",", "tfm_y", "=", "tfm_y", ",", "sz_y", "=", "sz_y", ",", "tfms", "=", "aug_tfms", ",", "max_zoom", "=", "max_zoom", ",", "pad_mode", "=", "pad_mode", ",", "scale", "=", "scale", ")", "return", "trn_tfm", ",", "val_tfm" ]
Given the statistics of the training image sets, returns separate training and validation transform functions
[ "Given", "the", "statistics", "of", "the", "training", "image", "sets", "returns", "separate", "training", "and", "validation", "transform", "functions" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/old/fastai/transforms.py#L716-L728
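Typical use in old fastai, with ImageNet-style per-channel stats assumed:
    stats = ([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    trn_tfms, val_tfms = tfms_from_stats(stats, sz=224, aug_tfms=[RandomFlip()], max_zoom=1.1)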
20,861
fastai/fastai
fastai/vision/data.py
get_image_files
def get_image_files(c:PathOrStr, check_ext:bool=True, recurse=False)->FilePathList:
    "Return list of files in `c` that are images. `check_ext` will filter to `image_extensions`."
    return get_files(c, extensions=(image_extensions if check_ext else None), recurse=recurse)
python
def get_image_files(c:PathOrStr, check_ext:bool=True, recurse=False)->FilePathList:
    "Return list of files in `c` that are images. `check_ext` will filter to `image_extensions`."
    return get_files(c, extensions=(image_extensions if check_ext else None), recurse=recurse)
[ "def", "get_image_files", "(", "c", ":", "PathOrStr", ",", "check_ext", ":", "bool", "=", "True", ",", "recurse", "=", "False", ")", "->", "FilePathList", ":", "return", "get_files", "(", "c", ",", "extensions", "=", "(", "image_extensions", "if", "check_ext", "else", "None", ")", ",", "recurse", "=", "recurse", ")" ]
Return list of files in `c` that are images. `check_ext` will filter to `image_extensions`.
[ "Return", "list", "of", "files", "in", "c", "that", "are", "images", ".", "check_ext", "will", "filter", "to", "image_extensions", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L19-L21
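A one-line sketch (the data path is assumed):
    files = get_image_files('data/train', recurse=True)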
20,862
fastai/fastai
fastai/vision/data.py
bb_pad_collate
def bb_pad_collate(samples:BatchSamples, pad_idx:int=0) -> Tuple[FloatTensor, Tuple[LongTensor, LongTensor]]:
    "Function that collects `samples` of labelled bboxes and adds padding with `pad_idx`."
    if isinstance(samples[0][1], int): return data_collate(samples)
    max_len = max([len(s[1].data[1]) for s in samples])
    bboxes = torch.zeros(len(samples), max_len, 4)
    labels = torch.zeros(len(samples), max_len).long() + pad_idx
    imgs = []
    for i,s in enumerate(samples):
        imgs.append(s[0].data[None])
        bbs, lbls = s[1].data
        if not (bbs.nelement() == 0):
            bboxes[i,-len(lbls):] = bbs
            labels[i,-len(lbls):] = tensor(lbls)
    return torch.cat(imgs,0), (bboxes,labels)
python
def bb_pad_collate(samples:BatchSamples, pad_idx:int=0) -> Tuple[FloatTensor, Tuple[LongTensor, LongTensor]]:
    "Function that collects `samples` of labelled bboxes and adds padding with `pad_idx`."
    if isinstance(samples[0][1], int): return data_collate(samples)
    max_len = max([len(s[1].data[1]) for s in samples])
    bboxes = torch.zeros(len(samples), max_len, 4)
    labels = torch.zeros(len(samples), max_len).long() + pad_idx
    imgs = []
    for i,s in enumerate(samples):
        imgs.append(s[0].data[None])
        bbs, lbls = s[1].data
        if not (bbs.nelement() == 0):
            bboxes[i,-len(lbls):] = bbs
            labels[i,-len(lbls):] = tensor(lbls)
    return torch.cat(imgs,0), (bboxes,labels)
[ "def", "bb_pad_collate", "(", "samples", ":", "BatchSamples", ",", "pad_idx", ":", "int", "=", "0", ")", "->", "Tuple", "[", "FloatTensor", ",", "Tuple", "[", "LongTensor", ",", "LongTensor", "]", "]", ":", "if", "isinstance", "(", "samples", "[", "0", "]", "[", "1", "]", ",", "int", ")", ":", "return", "data_collate", "(", "samples", ")", "max_len", "=", "max", "(", "[", "len", "(", "s", "[", "1", "]", ".", "data", "[", "1", "]", ")", "for", "s", "in", "samples", "]", ")", "bboxes", "=", "torch", ".", "zeros", "(", "len", "(", "samples", ")", ",", "max_len", ",", "4", ")", "labels", "=", "torch", ".", "zeros", "(", "len", "(", "samples", ")", ",", "max_len", ")", ".", "long", "(", ")", "+", "pad_idx", "imgs", "=", "[", "]", "for", "i", ",", "s", "in", "enumerate", "(", "samples", ")", ":", "imgs", ".", "append", "(", "s", "[", "0", "]", ".", "data", "[", "None", "]", ")", "bbs", ",", "lbls", "=", "s", "[", "1", "]", ".", "data", "if", "not", "(", "bbs", ".", "nelement", "(", ")", "==", "0", ")", ":", "bboxes", "[", "i", ",", "-", "len", "(", "lbls", ")", ":", "]", "=", "bbs", "labels", "[", "i", ",", "-", "len", "(", "lbls", ")", ":", "]", "=", "tensor", "(", "lbls", ")", "return", "torch", ".", "cat", "(", "imgs", ",", "0", ")", ",", "(", "bboxes", ",", "labels", ")" ]
Function that collects `samples` of labelled bboxes and adds padding with `pad_idx`.
[ "Function", "that", "collects", "samples", "of", "labelled", "bboxes", "and", "adds", "padding", "with", "pad_idx", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L40-L53
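A sketch of wiring it into a databunch for object detection (the label lists `src` is assumed):
    data = src.databunch(bs=16, collate_fn=bb_pad_collate)  # pads ragged bbox lists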
20,863
fastai/fastai
fastai/vision/data.py
normalize
def normalize(x:TensorImage, mean:FloatTensor,std:FloatTensor)->TensorImage:
    "Normalize `x` with `mean` and `std`."
    return (x-mean[...,None,None]) / std[...,None,None]
python
def normalize(x:TensorImage, mean:FloatTensor,std:FloatTensor)->TensorImage:
    "Normalize `x` with `mean` and `std`."
    return (x-mean[...,None,None]) / std[...,None,None]
[ "def", "normalize", "(", "x", ":", "TensorImage", ",", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ")", "->", "TensorImage", ":", "return", "(", "x", "-", "mean", "[", "...", ",", "None", ",", "None", "]", ")", "/", "std", "[", "...", ",", "None", ",", "None", "]" ]
Normalize `x` with `mean` and `std`.
[ "Normalize", "x", "with", "mean", "and", "std", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L55-L57
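A broadcasting sketch: `mean` and `std` are per-channel and expand over height and width:
    import torch
    x = torch.rand(8, 3, 224, 224)
    mean = torch.tensor([0.485, 0.456, 0.406]); std = torch.tensor([0.229, 0.224, 0.225])
    xn = normalize(x, mean, std)   # still shape (8, 3, 224, 224)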
20,864
fastai/fastai
fastai/vision/data.py
denormalize
def denormalize(x:TensorImage, mean:FloatTensor,std:FloatTensor, do_x:bool=True)->TensorImage:
    "Denormalize `x` with `mean` and `std`."
    return x.cpu().float()*std[...,None,None] + mean[...,None,None] if do_x else x.cpu()
python
def denormalize(x:TensorImage, mean:FloatTensor,std:FloatTensor, do_x:bool=True)->TensorImage:
    "Denormalize `x` with `mean` and `std`."
    return x.cpu().float()*std[...,None,None] + mean[...,None,None] if do_x else x.cpu()
[ "def", "denormalize", "(", "x", ":", "TensorImage", ",", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ",", "do_x", ":", "bool", "=", "True", ")", "->", "TensorImage", ":", "return", "x", ".", "cpu", "(", ")", ".", "float", "(", ")", "*", "std", "[", "...", ",", "None", ",", "None", "]", "+", "mean", "[", "...", ",", "None", ",", "None", "]", "if", "do_x", "else", "x", ".", "cpu", "(", ")" ]
Denormalize `x` with `mean` and `std`.
[ "Denormalize", "x", "with", "mean", "and", "std", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L59-L61
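The inverse operation, typically used before plotting (same assumed stats as above):
    x_img = denormalize(xn, mean, std)  # back near [0, 1], moved to CPU float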
20,865
fastai/fastai
fastai/vision/data.py
_normalize_batch
def _normalize_batch(b:Tuple[Tensor,Tensor], mean:FloatTensor, std:FloatTensor, do_x:bool=True, do_y:bool=False)->Tuple[Tensor,Tensor]:
    "`b` = `x`,`y` - normalize `x` array of imgs and `do_y` optionally `y`."
    x,y = b
    mean,std = mean.to(x.device),std.to(x.device)
    if do_x: x = normalize(x,mean,std)
    if do_y and len(y.shape) == 4: y = normalize(y,mean,std)
    return x,y
python
def _normalize_batch(b:Tuple[Tensor,Tensor], mean:FloatTensor, std:FloatTensor, do_x:bool=True, do_y:bool=False)->Tuple[Tensor,Tensor]:
    "`b` = `x`,`y` - normalize `x` array of imgs and `do_y` optionally `y`."
    x,y = b
    mean,std = mean.to(x.device),std.to(x.device)
    if do_x: x = normalize(x,mean,std)
    if do_y and len(y.shape) == 4: y = normalize(y,mean,std)
    return x,y
[ "def", "_normalize_batch", "(", "b", ":", "Tuple", "[", "Tensor", ",", "Tensor", "]", ",", "mean", ":", "FloatTensor", ",", "std", ":", "FloatTensor", ",", "do_x", ":", "bool", "=", "True", ",", "do_y", ":", "bool", "=", "False", ")", "->", "Tuple", "[", "Tensor", ",", "Tensor", "]", ":", "x", ",", "y", "=", "b", "mean", ",", "std", "=", "mean", ".", "to", "(", "x", ".", "device", ")", ",", "std", ".", "to", "(", "x", ".", "device", ")", "if", "do_x", ":", "x", "=", "normalize", "(", "x", ",", "mean", ",", "std", ")", "if", "do_y", "and", "len", "(", "y", ".", "shape", ")", "==", "4", ":", "y", "=", "normalize", "(", "y", ",", "mean", ",", "std", ")", "return", "x", ",", "y" ]
`b` = `x`,`y` - normalize `x` array of imgs and `do_y` optionally `y`.
[ "b", "=", "x", "y", "-", "normalize", "x", "array", "of", "imgs", "and", "do_y", "optionally", "y", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L63-L69
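A sketch of how fastai v1 wires it up as a batch transform via `partial` (the `add_tfm` step is assumed from `DataBunch.normalize`):
    from functools import partial
    norm = partial(_normalize_batch, mean=mean, std=std, do_x=True, do_y=False)
    data.add_tfm(norm)   # applied to every (x, y) batch the dataloaders yield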
20,866
fastai/fastai
fastai/vision/data.py
channel_view
def channel_view(x:Tensor)->Tensor:
    "Make channel the first axis of `x` and flatten remaining axes"
    return x.transpose(0,1).contiguous().view(x.shape[1],-1)
python
def channel_view(x:Tensor)->Tensor:
    "Make channel the first axis of `x` and flatten remaining axes"
    return x.transpose(0,1).contiguous().view(x.shape[1],-1)
[ "def", "channel_view", "(", "x", ":", "Tensor", ")", "->", "Tensor", ":", "return", "x", ".", "transpose", "(", "0", ",", "1", ")", ".", "contiguous", "(", ")", ".", "view", "(", "x", ".", "shape", "[", "1", "]", ",", "-", "1", ")" ]
Make channel the first axis of `x` and flatten remaining axes
[ "Make", "channel", "the", "first", "axis", "of", "x", "and", "flatten", "remaining", "axes" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L81-L83
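A shape sketch:
    x = torch.rand(8, 3, 224, 224)
    channel_view(x).shape  # torch.Size([3, 401408]): one flattened row per channel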
20,867
fastai/fastai
fastai/vision/data.py
download_images
def download_images(urls:Collection[str], dest:PathOrStr, max_pics:int=1000, max_workers:int=8, timeout=4):
    "Download images listed in text file `urls` to path `dest`, at most `max_pics`"
    urls = open(urls).read().strip().split("\n")[:max_pics]
    dest = Path(dest)
    dest.mkdir(exist_ok=True)
    parallel(partial(_download_image_inner, dest, timeout=timeout), urls, max_workers=max_workers)
python
def download_images(urls:Collection[str], dest:PathOrStr, max_pics:int=1000, max_workers:int=8, timeout=4):
    "Download images listed in text file `urls` to path `dest`, at most `max_pics`"
    urls = open(urls).read().strip().split("\n")[:max_pics]
    dest = Path(dest)
    dest.mkdir(exist_ok=True)
    parallel(partial(_download_image_inner, dest, timeout=timeout), urls, max_workers=max_workers)
[ "def", "download_images", "(", "urls", ":", "Collection", "[", "str", "]", ",", "dest", ":", "PathOrStr", ",", "max_pics", ":", "int", "=", "1000", ",", "max_workers", ":", "int", "=", "8", ",", "timeout", "=", "4", ")", ":", "urls", "=", "open", "(", "urls", ")", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", "\"\\n\"", ")", "[", ":", "max_pics", "]", "dest", "=", "Path", "(", "dest", ")", "dest", ".", "mkdir", "(", "exist_ok", "=", "True", ")", "parallel", "(", "partial", "(", "_download_image_inner", ",", "dest", ",", "timeout", "=", "timeout", ")", ",", "urls", ",", "max_workers", "=", "max_workers", ")" ]
Download images listed in text file `urls` to path `dest`, at most `max_pics`
[ "Download", "images", "listed", "in", "text", "file", "urls", "to", "path", "dest", "at", "most", "max_pics" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L191-L196
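A sketch, assuming `urls.txt` holds one image URL per line:
    download_images('urls.txt', 'data/bears', max_pics=200, max_workers=8)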
20,868
fastai/fastai
fastai/vision/data.py
verify_image
def verify_image(file:Path, idx:int, delete:bool, max_size:Union[int,Tuple[int,int]]=None, dest:Path=None,
                 n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None,
                 resume:bool=False, **kwargs):
    "Check if the image in `file` exists, maybe resize it and copy it to `dest`."
    try:
        # deal with partially broken images as indicated by PIL warnings
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                with open(file, 'rb') as img_file: PIL.Image.open(img_file)
            except Warning as w:
                if "Possibly corrupt EXIF data" in str(w):
                    if delete: # green light to modify files
                        print(f"{file}: Removing corrupt EXIF data")
                        warnings.simplefilter("ignore")
                        # save EXIF-cleaned up image, which happens automatically
                        PIL.Image.open(file).save(file)
                    else: # keep user's files intact
                        print(f"{file}: Not removing corrupt EXIF data, pass `delete=True` to do that")
                else: warnings.warn(w)

        img = PIL.Image.open(file)
        imgarr = np.array(img)
        img_channels = 1 if len(imgarr.shape) == 2 else imgarr.shape[2]
        if (max_size is not None and (img.height > max_size or img.width > max_size)) or img_channels != n_channels:
            assert isinstance(dest, Path), "You should provide `dest` Path to save resized image"
            dest_fname = dest/file.name
            if ext is not None: dest_fname=dest_fname.with_suffix(ext)
            if resume and os.path.isfile(dest_fname): return
            if max_size is not None:
                new_sz = resize_to(img, max_size)
                img = img.resize(new_sz, resample=interp)
            if n_channels == 3: img = img.convert("RGB")
            img.save(dest_fname, img_format, **kwargs)
    except Exception as e:
        print(f'{e}')
        if delete: file.unlink()
python
def verify_image(file:Path, idx:int, delete:bool, max_size:Union[int,Tuple[int,int]]=None, dest:Path=None,
                 n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None,
                 resume:bool=False, **kwargs):
    "Check if the image in `file` exists, maybe resize it and copy it to `dest`."
    try:
        # deal with partially broken images as indicated by PIL warnings
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                with open(file, 'rb') as img_file: PIL.Image.open(img_file)
            except Warning as w:
                if "Possibly corrupt EXIF data" in str(w):
                    if delete: # green light to modify files
                        print(f"{file}: Removing corrupt EXIF data")
                        warnings.simplefilter("ignore")
                        # save EXIF-cleaned up image, which happens automatically
                        PIL.Image.open(file).save(file)
                    else: # keep user's files intact
                        print(f"{file}: Not removing corrupt EXIF data, pass `delete=True` to do that")
                else: warnings.warn(w)

        img = PIL.Image.open(file)
        imgarr = np.array(img)
        img_channels = 1 if len(imgarr.shape) == 2 else imgarr.shape[2]
        if (max_size is not None and (img.height > max_size or img.width > max_size)) or img_channels != n_channels:
            assert isinstance(dest, Path), "You should provide `dest` Path to save resized image"
            dest_fname = dest/file.name
            if ext is not None: dest_fname=dest_fname.with_suffix(ext)
            if resume and os.path.isfile(dest_fname): return
            if max_size is not None:
                new_sz = resize_to(img, max_size)
                img = img.resize(new_sz, resample=interp)
            if n_channels == 3: img = img.convert("RGB")
            img.save(dest_fname, img_format, **kwargs)
    except Exception as e:
        print(f'{e}')
        if delete: file.unlink()
[ "def", "verify_image", "(", "file", ":", "Path", ",", "idx", ":", "int", ",", "delete", ":", "bool", ",", "max_size", ":", "Union", "[", "int", ",", "Tuple", "[", "int", ",", "int", "]", "]", "=", "None", ",", "dest", ":", "Path", "=", "None", ",", "n_channels", ":", "int", "=", "3", ",", "interp", "=", "PIL", ".", "Image", ".", "BILINEAR", ",", "ext", ":", "str", "=", "None", ",", "img_format", ":", "str", "=", "None", ",", "resume", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ")", ":", "try", ":", "# deal with partially broken images as indicated by PIL warnings", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "'error'", ")", "try", ":", "with", "open", "(", "file", ",", "'rb'", ")", "as", "img_file", ":", "PIL", ".", "Image", ".", "open", "(", "img_file", ")", "except", "Warning", "as", "w", ":", "if", "\"Possibly corrupt EXIF data\"", "in", "str", "(", "w", ")", ":", "if", "delete", ":", "# green light to modify files", "print", "(", "f\"{file}: Removing corrupt EXIF data\"", ")", "warnings", ".", "simplefilter", "(", "\"ignore\"", ")", "# save EXIF-cleaned up image, which happens automatically", "PIL", ".", "Image", ".", "open", "(", "file", ")", ".", "save", "(", "file", ")", "else", ":", "# keep user's files intact", "print", "(", "f\"{file}: Not removing corrupt EXIF data, pass `delete=True` to do that\"", ")", "else", ":", "warnings", ".", "warn", "(", "w", ")", "img", "=", "PIL", ".", "Image", ".", "open", "(", "file", ")", "imgarr", "=", "np", ".", "array", "(", "img", ")", "img_channels", "=", "1", "if", "len", "(", "imgarr", ".", "shape", ")", "==", "2", "else", "imgarr", ".", "shape", "[", "2", "]", "if", "(", "max_size", "is", "not", "None", "and", "(", "img", ".", "height", ">", "max_size", "or", "img", ".", "width", ">", "max_size", ")", ")", "or", "img_channels", "!=", "n_channels", ":", "assert", "isinstance", "(", "dest", ",", "Path", ")", ",", "\"You should provide `dest` Path to save resized image\"", "dest_fname", "=", "dest", "/", "file", ".", "name", "if", "ext", "is", "not", "None", ":", "dest_fname", "=", "dest_fname", ".", "with_suffix", "(", "ext", ")", "if", "resume", "and", "os", ".", "path", ".", "isfile", "(", "dest_fname", ")", ":", "return", "if", "max_size", "is", "not", "None", ":", "new_sz", "=", "resize_to", "(", "img", ",", "max_size", ")", "img", "=", "img", ".", "resize", "(", "new_sz", ",", "resample", "=", "interp", ")", "if", "n_channels", "==", "3", ":", "img", "=", "img", ".", "convert", "(", "\"RGB\"", ")", "img", ".", "save", "(", "dest_fname", ",", "img_format", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "print", "(", "f'{e}'", ")", "if", "delete", ":", "file", ".", "unlink", "(", ")" ]
Check if the image in `file` exists, maybe resize it and copy it to `dest`.
[ "Check", "if", "the", "image", "in", "file", "exists", "maybe", "resize", "it", "and", "copy", "it", "to", "dest", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L205-L240
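Normally reached through `verify_images`; a direct-call sketch with assumed paths:
    verify_image(Path('data/img.jpg'), 0, delete=False, max_size=500, dest=Path('data/small'))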
20,869
fastai/fastai
fastai/vision/data.py
verify_images
def verify_images(path:PathOrStr, delete:bool=True, max_workers:int=4, max_size:Union[int]=None, recurse:bool=False,
                  dest:PathOrStr='.', n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None,
                  resume:bool=None, **kwargs):
    "Check if the images in `path` aren't broken, maybe resize them and copy them to `dest`."
    path = Path(path)
    if resume is None and dest == '.': resume=False
    dest = path/Path(dest)
    os.makedirs(dest, exist_ok=True)
    files = get_image_files(path, recurse=recurse)
    func = partial(verify_image, delete=delete, max_size=max_size, dest=dest, n_channels=n_channels, interp=interp,
                   ext=ext, img_format=img_format, resume=resume, **kwargs)
    parallel(func, files, max_workers=max_workers)
python
def verify_images(path:PathOrStr, delete:bool=True, max_workers:int=4, max_size:Union[int]=None, recurse:bool=False,
                  dest:PathOrStr='.', n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None,
                  resume:bool=None, **kwargs):
    "Check if the images in `path` aren't broken, maybe resize them and copy them to `dest`."
    path = Path(path)
    if resume is None and dest == '.': resume=False
    dest = path/Path(dest)
    os.makedirs(dest, exist_ok=True)
    files = get_image_files(path, recurse=recurse)
    func = partial(verify_image, delete=delete, max_size=max_size, dest=dest, n_channels=n_channels, interp=interp,
                   ext=ext, img_format=img_format, resume=resume, **kwargs)
    parallel(func, files, max_workers=max_workers)
[ "def", "verify_images", "(", "path", ":", "PathOrStr", ",", "delete", ":", "bool", "=", "True", ",", "max_workers", ":", "int", "=", "4", ",", "max_size", ":", "Union", "[", "int", "]", "=", "None", ",", "recurse", ":", "bool", "=", "False", ",", "dest", ":", "PathOrStr", "=", "'.'", ",", "n_channels", ":", "int", "=", "3", ",", "interp", "=", "PIL", ".", "Image", ".", "BILINEAR", ",", "ext", ":", "str", "=", "None", ",", "img_format", ":", "str", "=", "None", ",", "resume", ":", "bool", "=", "None", ",", "*", "*", "kwargs", ")", ":", "path", "=", "Path", "(", "path", ")", "if", "resume", "is", "None", "and", "dest", "==", "'.'", ":", "resume", "=", "False", "dest", "=", "path", "/", "Path", "(", "dest", ")", "os", ".", "makedirs", "(", "dest", ",", "exist_ok", "=", "True", ")", "files", "=", "get_image_files", "(", "path", ",", "recurse", "=", "recurse", ")", "func", "=", "partial", "(", "verify_image", ",", "delete", "=", "delete", ",", "max_size", "=", "max_size", ",", "dest", "=", "dest", ",", "n_channels", "=", "n_channels", ",", "interp", "=", "interp", ",", "ext", "=", "ext", ",", "img_format", "=", "img_format", ",", "resume", "=", "resume", ",", "*", "*", "kwargs", ")", "parallel", "(", "func", ",", "files", ",", "max_workers", "=", "max_workers", ")" ]
Check if the images in `path` aren't broken, maybe resize them and copy them to `dest`.
[ "Check", "if", "the", "images", "in", "path", "aren", "t", "broken", "maybe", "resize", "them", "and", "copy", "them", "to", "dest", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L242-L253
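A typical cleanup pass after `download_images` (path assumed):
    verify_images('data/bears', delete=True, max_size=500)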
20,870
fastai/fastai
fastai/vision/data.py
_presize
def _presize(self, size:int, val_xtra_size:int=32, scale:Tuple[float]=(0.08, 1.0), ratio:Tuple[float]=(0.75, 4./3.),
             interpolation:int=2):
    "Resize images to `size` using `RandomResizedCrop`, passing along `kwargs` to train transform"
    return self.pre_transform(
        tvt.RandomResizedCrop(size, scale=scale, ratio=ratio, interpolation=interpolation),
        [tvt.Resize(size+val_xtra_size), tvt.CenterCrop(size)])
python
def _presize(self, size:int, val_xtra_size:int=32, scale:Tuple[float]=(0.08, 1.0), ratio:Tuple[float]=(0.75, 4./3.),
             interpolation:int=2):
    "Resize images to `size` using `RandomResizedCrop`, passing along `kwargs` to train transform"
    return self.pre_transform(
        tvt.RandomResizedCrop(size, scale=scale, ratio=ratio, interpolation=interpolation),
        [tvt.Resize(size+val_xtra_size), tvt.CenterCrop(size)])
[ "def", "_presize", "(", "self", ",", "size", ":", "int", ",", "val_xtra_size", ":", "int", "=", "32", ",", "scale", ":", "Tuple", "[", "float", "]", "=", "(", "0.08", ",", "1.0", ")", ",", "ratio", ":", "Tuple", "[", "float", "]", "=", "(", "0.75", ",", "4.", "/", "3.", ")", ",", "interpolation", ":", "int", "=", "2", ")", ":", "return", "self", ".", "pre_transform", "(", "tvt", ".", "RandomResizedCrop", "(", "size", ",", "scale", "=", "scale", ",", "ratio", "=", "ratio", ",", "interpolation", "=", "interpolation", ")", ",", "[", "tvt", ".", "Resize", "(", "size", "+", "val_xtra_size", ")", ",", "tvt", ".", "CenterCrop", "(", "size", ")", "]", ")" ]
Resize images to `size` using `RandomResizedCrop`, passing along `kwargs` to train transform
[ "Resize", "images", "to", "size", "using", "RandomResizedCrop", "passing", "along", "kwargs", "to", "train", "transform" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L446-L451
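A sketch of the train/valid pipeline it configures, in torchvision terms (`pre_transform` is assumed from the enclosing class):
    # train: RandomResizedCrop(224); valid: Resize(256) then CenterCrop(224)
    data._presize(224, val_xtra_size=32)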
20,871
fastai/fastai
fastai/vision/data.py
ImageDataBunch.create_from_ll
def create_from_ll(cls, lls:LabelLists, bs:int=64, val_bs:int=None, ds_tfms:Optional[TfmList]=None,
                   num_workers:int=defaults.cpus, dl_tfms:Optional[Collection[Callable]]=None,
                   device:torch.device=None, test:Optional[PathOrStr]=None, collate_fn:Callable=data_collate,
                   size:int=None, no_check:bool=False, resize_method:ResizeMethod=None, mult:int=None,
                   padding_mode:str='reflection', mode:str='bilinear', tfm_y:bool=False)->'ImageDataBunch':
    "Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`."
    lls = lls.transform(tfms=ds_tfms, size=size, resize_method=resize_method, mult=mult,
                        padding_mode=padding_mode, mode=mode, tfm_y=tfm_y)
    if test is not None: lls.add_test_folder(test)
    return lls.databunch(bs=bs, val_bs=val_bs, dl_tfms=dl_tfms, num_workers=num_workers,
                         collate_fn=collate_fn, device=device, no_check=no_check)
python
def create_from_ll(cls, lls:LabelLists, bs:int=64, val_bs:int=None, ds_tfms:Optional[TfmList]=None,
                   num_workers:int=defaults.cpus, dl_tfms:Optional[Collection[Callable]]=None,
                   device:torch.device=None, test:Optional[PathOrStr]=None, collate_fn:Callable=data_collate,
                   size:int=None, no_check:bool=False, resize_method:ResizeMethod=None, mult:int=None,
                   padding_mode:str='reflection', mode:str='bilinear', tfm_y:bool=False)->'ImageDataBunch':
    "Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`."
    lls = lls.transform(tfms=ds_tfms, size=size, resize_method=resize_method, mult=mult,
                        padding_mode=padding_mode, mode=mode, tfm_y=tfm_y)
    if test is not None: lls.add_test_folder(test)
    return lls.databunch(bs=bs, val_bs=val_bs, dl_tfms=dl_tfms, num_workers=num_workers,
                         collate_fn=collate_fn, device=device, no_check=no_check)
[ "def", "create_from_ll", "(", "cls", ",", "lls", ":", "LabelLists", ",", "bs", ":", "int", "=", "64", ",", "val_bs", ":", "int", "=", "None", ",", "ds_tfms", ":", "Optional", "[", "TfmList", "]", "=", "None", ",", "num_workers", ":", "int", "=", "defaults", ".", "cpus", ",", "dl_tfms", ":", "Optional", "[", "Collection", "[", "Callable", "]", "]", "=", "None", ",", "device", ":", "torch", ".", "device", "=", "None", ",", "test", ":", "Optional", "[", "PathOrStr", "]", "=", "None", ",", "collate_fn", ":", "Callable", "=", "data_collate", ",", "size", ":", "int", "=", "None", ",", "no_check", ":", "bool", "=", "False", ",", "resize_method", ":", "ResizeMethod", "=", "None", ",", "mult", ":", "int", "=", "None", ",", "padding_mode", ":", "str", "=", "'reflection'", ",", "mode", ":", "str", "=", "'bilinear'", ",", "tfm_y", ":", "bool", "=", "False", ")", "->", "'ImageDataBunch'", ":", "lls", "=", "lls", ".", "transform", "(", "tfms", "=", "ds_tfms", ",", "size", "=", "size", ",", "resize_method", "=", "resize_method", ",", "mult", "=", "mult", ",", "padding_mode", "=", "padding_mode", ",", "mode", "=", "mode", ",", "tfm_y", "=", "tfm_y", ")", "if", "test", "is", "not", "None", ":", "lls", ".", "add_test_folder", "(", "test", ")", "return", "lls", ".", "databunch", "(", "bs", "=", "bs", ",", "val_bs", "=", "val_bs", ",", "dl_tfms", "=", "dl_tfms", ",", "num_workers", "=", "num_workers", ",", "collate_fn", "=", "collate_fn", ",", "device", "=", "device", ",", "no_check", "=", "no_check", ")" ]
Create an `ImageDataBunch` from `LabelLists` `lls` with potential `ds_tfms`.
[ "Create", "an", "ImageDataBunch", "from", "LabelLists", "lls", "with", "potential", "ds_tfms", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L90-L100
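A sketch, assuming `lls` was built with the data block API and already labelled:
    data = ImageDataBunch.create_from_ll(lls, bs=32, ds_tfms=get_transforms(), size=224)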
20,872
fastai/fastai
fastai/vision/data.py
ImageDataBunch.from_df
def from_df(cls, path:PathOrStr, df:pd.DataFrame, folder:PathOrStr=None, label_delim:str=None, valid_pct:float=0.2,
            fn_col:IntsOrStrs=0, label_col:IntsOrStrs=1, suffix:str='', **kwargs:Any)->'ImageDataBunch':
    "Create from a `DataFrame` `df`."
    src = (ImageList.from_df(df, path=path, folder=folder, suffix=suffix, cols=fn_col)
            .split_by_rand_pct(valid_pct)
            .label_from_df(label_delim=label_delim, cols=label_col))
    return cls.create_from_ll(src, **kwargs)
python
def from_df(cls, path:PathOrStr, df:pd.DataFrame, folder:PathOrStr=None, label_delim:str=None, valid_pct:float=0.2,
            fn_col:IntsOrStrs=0, label_col:IntsOrStrs=1, suffix:str='', **kwargs:Any)->'ImageDataBunch':
    "Create from a `DataFrame` `df`."
    src = (ImageList.from_df(df, path=path, folder=folder, suffix=suffix, cols=fn_col)
            .split_by_rand_pct(valid_pct)
            .label_from_df(label_delim=label_delim, cols=label_col))
    return cls.create_from_ll(src, **kwargs)
[ "def", "from_df", "(", "cls", ",", "path", ":", "PathOrStr", ",", "df", ":", "pd", ".", "DataFrame", ",", "folder", ":", "PathOrStr", "=", "None", ",", "label_delim", ":", "str", "=", "None", ",", "valid_pct", ":", "float", "=", "0.2", ",", "fn_col", ":", "IntsOrStrs", "=", "0", ",", "label_col", ":", "IntsOrStrs", "=", "1", ",", "suffix", ":", "str", "=", "''", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'ImageDataBunch'", ":", "src", "=", "(", "ImageList", ".", "from_df", "(", "df", ",", "path", "=", "path", ",", "folder", "=", "folder", ",", "suffix", "=", "suffix", ",", "cols", "=", "fn_col", ")", ".", "split_by_rand_pct", "(", "valid_pct", ")", ".", "label_from_df", "(", "label_delim", "=", "label_delim", ",", "cols", "=", "label_col", ")", ")", "return", "cls", ".", "create_from_ll", "(", "src", ",", "*", "*", "kwargs", ")" ]
Create from a `DataFrame` `df`.
[ "Create", "from", "a", "DataFrame", "df", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L114-L120
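A sketch with a two-column frame (filenames in column 0, labels in column 1 by default; values assumed):
    df = pd.DataFrame({'name': ['a.jpg', 'b.jpg'], 'label': [0, 1]})
    data = ImageDataBunch.from_df('data', df, folder='train', size=224)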
20,873
fastai/fastai
fastai/vision/data.py
ImageDataBunch.from_lists
def from_lists(cls, path:PathOrStr, fnames:FilePathList, labels:Collection[str], valid_pct:float=0.2,
               item_cls:Callable=None, **kwargs):
    "Create from list of `fnames` in `path`."
    item_cls = ifnone(item_cls, ImageList)
    fname2label = {f:l for (f,l) in zip(fnames, labels)}
    src = (item_cls(fnames, path=path).split_by_rand_pct(valid_pct)
           .label_from_func(lambda x:fname2label[x]))
    return cls.create_from_ll(src, **kwargs)
python
def from_lists(cls, path:PathOrStr, fnames:FilePathList, labels:Collection[str], valid_pct:float=0.2,
               item_cls:Callable=None, **kwargs):
    "Create from list of `fnames` in `path`."
    item_cls = ifnone(item_cls, ImageList)
    fname2label = {f:l for (f,l) in zip(fnames, labels)}
    src = (item_cls(fnames, path=path).split_by_rand_pct(valid_pct)
           .label_from_func(lambda x:fname2label[x]))
    return cls.create_from_ll(src, **kwargs)
[ "def", "from_lists", "(", "cls", ",", "path", ":", "PathOrStr", ",", "fnames", ":", "FilePathList", ",", "labels", ":", "Collection", "[", "str", "]", ",", "valid_pct", ":", "float", "=", "0.2", ",", "item_cls", ":", "Callable", "=", "None", ",", "*", "*", "kwargs", ")", ":", "item_cls", "=", "ifnone", "(", "item_cls", ",", "ImageList", ")", "fname2label", "=", "{", "f", ":", "l", "for", "(", "f", ",", "l", ")", "in", "zip", "(", "fnames", ",", "labels", ")", "}", "src", "=", "(", "item_cls", "(", "fnames", ",", "path", "=", "path", ")", ".", "split_by_rand_pct", "(", "valid_pct", ")", ".", "label_from_func", "(", "lambda", "x", ":", "fname2label", "[", "x", "]", ")", ")", "return", "cls", ".", "create_from_ll", "(", "src", ",", "*", "*", "kwargs", ")" ]
Create from list of `fnames` in `path`.
[ "Create", "from", "list", "of", "fnames", "in", "path", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L133-L140
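A sketch with parallel `fnames`/`labels` lists assumed:
    data = ImageDataBunch.from_lists('data', fnames, labels, valid_pct=0.2, size=224)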
20,874
fastai/fastai
fastai/vision/data.py
ImageDataBunch.from_name_func
def from_name_func(cls, path:PathOrStr, fnames:FilePathList, label_func:Callable, valid_pct:float=0.2, **kwargs):
    "Create from list of `fnames` in `path` with `label_func`."
    src = ImageList(fnames, path=path).split_by_rand_pct(valid_pct)
    return cls.create_from_ll(src.label_from_func(label_func), **kwargs)
python
def from_name_func(cls, path:PathOrStr, fnames:FilePathList, label_func:Callable, valid_pct:float=0.2, **kwargs):
    "Create from list of `fnames` in `path` with `label_func`."
    src = ImageList(fnames, path=path).split_by_rand_pct(valid_pct)
    return cls.create_from_ll(src.label_from_func(label_func), **kwargs)
[ "def", "from_name_func", "(", "cls", ",", "path", ":", "PathOrStr", ",", "fnames", ":", "FilePathList", ",", "label_func", ":", "Callable", ",", "valid_pct", ":", "float", "=", "0.2", ",", "*", "*", "kwargs", ")", ":", "src", "=", "ImageList", "(", "fnames", ",", "path", "=", "path", ")", ".", "split_by_rand_pct", "(", "valid_pct", ")", "return", "cls", ".", "create_from_ll", "(", "src", ".", "label_from_func", "(", "label_func", ")", ",", "*", "*", "kwargs", ")" ]
Create from list of `fnames` in `path` with `label_func`.
[ "Create", "from", "list", "of", "fnames", "in", "path", "with", "label_func", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L143-L146
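A sketch labelling each file by its parent folder name (`fnames` assumed):
    data = ImageDataBunch.from_name_func('data', fnames, label_func=lambda p: Path(p).parent.name)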
20,875
fastai/fastai
fastai/vision/data.py
ImageDataBunch.from_name_re
def from_name_re(cls, path:PathOrStr, fnames:FilePathList, pat:str, valid_pct:float=0.2, **kwargs):
    "Create from list of `fnames` in `path` with re expression `pat`."
    pat = re.compile(pat)
    def _get_label(fn):
        if isinstance(fn, Path): fn = fn.as_posix()
        res = pat.search(str(fn))
        assert res,f'Failed to find "{pat}" in "{fn}"'
        return res.group(1)
    return cls.from_name_func(path, fnames, _get_label, valid_pct=valid_pct, **kwargs)
python
def from_name_re(cls, path:PathOrStr, fnames:FilePathList, pat:str, valid_pct:float=0.2, **kwargs):
    "Create from list of `fnames` in `path` with re expression `pat`."
    pat = re.compile(pat)
    def _get_label(fn):
        if isinstance(fn, Path): fn = fn.as_posix()
        res = pat.search(str(fn))
        assert res,f'Failed to find "{pat}" in "{fn}"'
        return res.group(1)
    return cls.from_name_func(path, fnames, _get_label, valid_pct=valid_pct, **kwargs)
[ "def", "from_name_re", "(", "cls", ",", "path", ":", "PathOrStr", ",", "fnames", ":", "FilePathList", ",", "pat", ":", "str", ",", "valid_pct", ":", "float", "=", "0.2", ",", "*", "*", "kwargs", ")", ":", "pat", "=", "re", ".", "compile", "(", "pat", ")", "def", "_get_label", "(", "fn", ")", ":", "if", "isinstance", "(", "fn", ",", "Path", ")", ":", "fn", "=", "fn", ".", "as_posix", "(", ")", "res", "=", "pat", ".", "search", "(", "str", "(", "fn", ")", ")", "assert", "res", ",", "f'Failed to find \"{pat}\" in \"{fn}\"'", "return", "res", ".", "group", "(", "1", ")", "return", "cls", ".", "from_name_func", "(", "path", ",", "fnames", ",", "_get_label", ",", "valid_pct", "=", "valid_pct", ",", "*", "*", "kwargs", ")" ]
Create from list of `fnames` in `path` with re expression `pat`.
[ "Create", "from", "list", "of", "fnames", "in", "path", "with", "re", "expression", "pat", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L149-L157
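A sketch with a pets-style filename pattern assumed; `group(1)` of the match becomes the label:
    pat = r'/([^/]+)_\d+.jpg$'
    data = ImageDataBunch.from_name_re('data', fnames, pat, size=224)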
20,876
fastai/fastai
fastai/vision/data.py
ImageDataBunch.batch_stats
def batch_stats(self, funcs:Collection[Callable]=None, ds_type:DatasetType=DatasetType.Train)->Tensor:
    "Grab a batch of data and call reduction function `func` per channel"
    funcs = ifnone(funcs, [torch.mean,torch.std])
    x = self.one_batch(ds_type=ds_type, denorm=False)[0].cpu()
    return [func(channel_view(x), 1) for func in funcs]
python
def batch_stats(self, funcs:Collection[Callable]=None, ds_type:DatasetType=DatasetType.Train)->Tensor:
    "Grab a batch of data and call reduction function `func` per channel"
    funcs = ifnone(funcs, [torch.mean,torch.std])
    x = self.one_batch(ds_type=ds_type, denorm=False)[0].cpu()
    return [func(channel_view(x), 1) for func in funcs]
[ "def", "batch_stats", "(", "self", ",", "funcs", ":", "Collection", "[", "Callable", "]", "=", "None", ",", "ds_type", ":", "DatasetType", "=", "DatasetType", ".", "Train", ")", "->", "Tensor", ":", "funcs", "=", "ifnone", "(", "funcs", ",", "[", "torch", ".", "mean", ",", "torch", ".", "std", "]", ")", "x", "=", "self", ".", "one_batch", "(", "ds_type", "=", "ds_type", ",", "denorm", "=", "False", ")", "[", "0", "]", ".", "cpu", "(", ")", "return", "[", "func", "(", "channel_view", "(", "x", ")", ",", "1", ")", "for", "func", "in", "funcs", "]" ]
Grab a batch of data and call reduction function `func` per channel
[ "Grab", "a", "batch", "of", "data", "and", "call", "reduction", "function", "func", "per", "channel" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L167-L171
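A sketch; with the default `funcs` this is what `data.normalize()` falls back to when given no stats:
    mean, std = data.batch_stats()  # per-channel mean/std of one training batch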
20,877
fastai/fastai
fastai/vision/data.py
ImageList.open
def open(self, fn):
    "Open image in `fn`, subclass and overwrite for custom behavior."
    return open_image(fn, convert_mode=self.convert_mode, after_open=self.after_open)
python
def open(self, fn):
    "Open image in `fn`, subclass and overwrite for custom behavior."
    return open_image(fn, convert_mode=self.convert_mode, after_open=self.after_open)
[ "def", "open", "(", "self", ",", "fn", ")", ":", "return", "open_image", "(", "fn", ",", "convert_mode", "=", "self", ".", "convert_mode", ",", "after_open", "=", "self", ".", "after_open", ")" ]
Open image in `fn`, subclass and overwrite for custom behavior.
[ "Open", "image", "in", "fn", "subclass", "and", "overwrite", "for", "custom", "behavior", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L264-L266
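A sketch of the subclassing pattern the docstring points at; forcing grayscale decoding via `convert_mode` is just an illustrative choice:

    from fastai.vision import ImageList, open_image

    class GrayImageList(ImageList):
        def open(self, fn):
            # decode every file as single-channel ('L'), whatever the list default is
            return open_image(fn, convert_mode='L')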
20,878
fastai/fastai
fastai/vision/data.py
ImageList.from_folder
def from_folder(cls, path:PathOrStr='.', extensions:Collection[str]=None, **kwargs)->ItemList: "Get the list of files in `path` that have an image suffix. `recurse` determines if we search subfolders." extensions = ifnone(extensions, image_extensions) return super().from_folder(path=path, extensions=extensions, **kwargs)
python
def from_folder(cls, path:PathOrStr='.', extensions:Collection[str]=None, **kwargs)->ItemList: "Get the list of files in `path` that have an image suffix. `recurse` determines if we search subfolders." extensions = ifnone(extensions, image_extensions) return super().from_folder(path=path, extensions=extensions, **kwargs)
[ "def", "from_folder", "(", "cls", ",", "path", ":", "PathOrStr", "=", "'.'", ",", "extensions", ":", "Collection", "[", "str", "]", "=", "None", ",", "*", "*", "kwargs", ")", "->", "ItemList", ":", "extensions", "=", "ifnone", "(", "extensions", ",", "image_extensions", ")", "return", "super", "(", ")", ".", "from_folder", "(", "path", "=", "path", ",", "extensions", "=", "extensions", ",", "*", "*", "kwargs", ")" ]
Get the list of files in `path` that have an image suffix. `recurse` determines if we search subfolders.
[ "Get", "the", "list", "of", "files", "in", "path", "that", "have", "an", "image", "suffix", ".", "recurse", "determines", "if", "we", "search", "subfolders", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L275-L278
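Example calls (paths hypothetical): gather every image under a folder, or restrict the suffixes considered:

    il = ImageList.from_folder('data/pets')                         # all image extensions
    pngs = ImageList.from_folder('data/pets', extensions=['.png'])  # PNGs only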
20,879
fastai/fastai
fastai/vision/data.py
ImageList.from_df
def from_df(cls, df:DataFrame, path:PathOrStr, cols:IntsOrStrs=0, folder:PathOrStr=None, suffix:str='', **kwargs)->'ItemList': "Get the filenames in `cols` of `df` with `folder` in front of them, `suffix` at the end." suffix = suffix or '' res = super().from_df(df, path=path, cols=cols, **kwargs) pref = f'{res.path}{os.path.sep}' if folder is not None: pref += f'{folder}{os.path.sep}' res.items = np.char.add(np.char.add(pref, res.items.astype(str)), suffix) return res
python
def from_df(cls, df:DataFrame, path:PathOrStr, cols:IntsOrStrs=0, folder:PathOrStr=None, suffix:str='', **kwargs)->'ItemList': "Get the filenames in `cols` of `df` with `folder` in front of them, `suffix` at the end." suffix = suffix or '' res = super().from_df(df, path=path, cols=cols, **kwargs) pref = f'{res.path}{os.path.sep}' if folder is not None: pref += f'{folder}{os.path.sep}' res.items = np.char.add(np.char.add(pref, res.items.astype(str)), suffix) return res
[ "def", "from_df", "(", "cls", ",", "df", ":", "DataFrame", ",", "path", ":", "PathOrStr", ",", "cols", ":", "IntsOrStrs", "=", "0", ",", "folder", ":", "PathOrStr", "=", "None", ",", "suffix", ":", "str", "=", "''", ",", "*", "*", "kwargs", ")", "->", "'ItemList'", ":", "suffix", "=", "suffix", "or", "''", "res", "=", "super", "(", ")", ".", "from_df", "(", "df", ",", "path", "=", "path", ",", "cols", "=", "cols", ",", "*", "*", "kwargs", ")", "pref", "=", "f'{res.path}{os.path.sep}'", "if", "folder", "is", "not", "None", ":", "pref", "+=", "f'{folder}{os.path.sep}'", "res", ".", "items", "=", "np", ".", "char", ".", "add", "(", "np", ".", "char", ".", "add", "(", "pref", ",", "res", ".", "items", ".", "astype", "(", "str", ")", ")", ",", "suffix", ")", "return", "res" ]
Get the filenames in `cols` of `df` with `folder` in front of them, `suffix` at the end.
[ "Get", "the", "filenames", "in", "cols", "of", "df", "with", "folder", "in", "front", "of", "them", "suffix", "at", "the", "end", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L281-L288
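A hedged sketch: `df` is assumed to hold relative filenames in a `name` column; `folder` and `suffix` are glued on to form full paths:

    import pandas as pd

    df = pd.DataFrame({'name': ['0001', '0002'], 'label': ['cat', 'dog']})
    # items become data/images/0001.png and data/images/0002.png
    il = ImageList.from_df(df, 'data', cols='name', folder='images', suffix='.png')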
20,880
fastai/fastai
fastai/vision/data.py
ObjectCategoryProcessor.generate_classes
def generate_classes(self, items): "Generate classes from unique `items` and add `background`." classes = super().generate_classes([o[1] for o in items]) classes = ['background'] + list(classes) return classes
python
def generate_classes(self, items): "Generate classes from unique `items` and add `background`." classes = super().generate_classes([o[1] for o in items]) classes = ['background'] + list(classes) return classes
[ "def", "generate_classes", "(", "self", ",", "items", ")", ":", "classes", "=", "super", "(", ")", ".", "generate_classes", "(", "[", "o", "[", "1", "]", "for", "o", "in", "items", "]", ")", "classes", "=", "[", "'background'", "]", "+", "list", "(", "classes", ")", "return", "classes" ]
Generate classes from unique `items` and add `background`.
[ "Generate", "classes", "from", "unique", "items", "and", "add", "background", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/data.py#L335-L339
20,881
fastai/fastai
fastai/utils/mem.py
reduce_mem_usage
def reduce_mem_usage(df): """ iterate through all the columns of a dataframe and modify the data type to reduce memory usage. """ start_mem = df.memory_usage().sum() / 1024**2 print('Memory usage of dataframe is {:.2f} MB'.format(start_mem)) #Removed from debugging columns = df.columns #.drop('index') for col in columns: col_type = df[col].dtype if str(col_type) != 'category' and col_type != 'datetime64[ns]' and col_type != bool: if col_type != object: c_min = df[col].min() c_max = df[col].max() if str(col_type)[:3] == 'int': if c_min > np.iinfo(np.int8).min and c_max < np.iinfo(np.int8).max: df[col] = df[col].astype(np.int8) elif c_min > np.iinfo(np.int16).min and c_max < np.iinfo(np.int16).max: df[col] = df[col].astype(np.int16) elif c_min > np.iinfo(np.int32).min and c_max < np.iinfo(np.int32).max: df[col] = df[col].astype(np.int32) elif c_min > np.iinfo(np.int64).min and c_max < np.iinfo(np.int64).max: df[col] = df[col].astype(np.int64) else: #if c_min > np.finfo(np.float16).min and c_max < np.finfo(np.float16).max: #df[col] = df[col].astype(np.float16) #Sometimes causes an error and had to remove if c_min > np.finfo(np.float32).min and c_max < np.finfo(np.float32).max: df[col] = df[col].astype(np.float32) else: print('Error '+col+' Value would be a float64. Disregarding.') else: df[col] = df[col].astype('category') end_mem = df.memory_usage().sum() / 1024**2 print('Memory usage after optimization is: {:.2f} MB'.format(end_mem)) print('Decreased by {:.1f}%'.format(100 * (start_mem - end_mem) / start_mem)) return df
python
def reduce_mem_usage(df): """ iterate through all the columns of a dataframe and modify the data type to reduce memory usage. """ start_mem = df.memory_usage().sum() / 1024**2 print('Memory usage of dataframe is {:.2f} MB'.format(start_mem)) #Removed from debugging columns = df.columns #.drop('index') for col in columns: col_type = df[col].dtype if str(col_type) != 'category' and col_type != 'datetime64[ns]' and col_type != bool: if col_type != object: c_min = df[col].min() c_max = df[col].max() if str(col_type)[:3] == 'int': if c_min > np.iinfo(np.int8).min and c_max < np.iinfo(np.int8).max: df[col] = df[col].astype(np.int8) elif c_min > np.iinfo(np.int16).min and c_max < np.iinfo(np.int16).max: df[col] = df[col].astype(np.int16) elif c_min > np.iinfo(np.int32).min and c_max < np.iinfo(np.int32).max: df[col] = df[col].astype(np.int32) elif c_min > np.iinfo(np.int64).min and c_max < np.iinfo(np.int64).max: df[col] = df[col].astype(np.int64) else: #if c_min > np.finfo(np.float16).min and c_max < np.finfo(np.float16).max: #df[col] = df[col].astype(np.float16) #Sometimes causes an error and had to remove if c_min > np.finfo(np.float32).min and c_max < np.finfo(np.float32).max: df[col] = df[col].astype(np.float32) else: print('Error '+col+' Value would be a float64. Disregarding.') else: df[col] = df[col].astype('category') end_mem = df.memory_usage().sum() / 1024**2 print('Memory usage after optimization is: {:.2f} MB'.format(end_mem)) print('Decreased by {:.1f}%'.format(100 * (start_mem - end_mem) / start_mem)) return df
[ "def", "reduce_mem_usage", "(", "df", ")", ":", "start_mem", "=", "df", ".", "memory_usage", "(", ")", ".", "sum", "(", ")", "/", "1024", "**", "2", "print", "(", "'Memory usage of dataframe is {:.2f} MB'", ".", "format", "(", "start_mem", ")", ")", "#Removed from debugging", "columns", "=", "df", ".", "columns", "#.drop('index')", "for", "col", "in", "columns", ":", "col_type", "=", "df", "[", "col", "]", ".", "dtype", "if", "str", "(", "col_type", ")", "!=", "'category'", "and", "col_type", "!=", "'datetime64[ns]'", "and", "col_type", "!=", "bool", ":", "if", "col_type", "!=", "object", ":", "c_min", "=", "df", "[", "col", "]", ".", "min", "(", ")", "c_max", "=", "df", "[", "col", "]", ".", "max", "(", ")", "if", "str", "(", "col_type", ")", "[", ":", "3", "]", "==", "'int'", ":", "if", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int8", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int8", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int8", ")", "elif", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int16", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int16", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int16", ")", "elif", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int32", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int32", ")", "elif", "c_min", ">", "np", ".", "iinfo", "(", "np", ".", "int64", ")", ".", "min", "and", "c_max", "<", "np", ".", "iinfo", "(", "np", ".", "int64", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "int64", ")", "else", ":", "#if c_min > np.finfo(np.float16).min and c_max < np.finfo(np.float16).max:", "#df[col] = df[col].astype(np.float16)", "#Sometimes causes and error and had to remove", "if", "c_min", ">", "np", ".", "finfo", "(", "np", ".", "float32", ")", ".", "min", "and", "c_max", "<", "np", ".", "finfo", "(", "np", ".", "float32", ")", ".", "max", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "np", ".", "float32", ")", "else", ":", "print", "(", "'Error '", "+", "col", "+", "' Value would be a float64. Disregarding.'", ")", "else", ":", "df", "[", "col", "]", "=", "df", "[", "col", "]", ".", "astype", "(", "'category'", ")", "end_mem", "=", "df", ".", "memory_usage", "(", ")", ".", "sum", "(", ")", "/", "1024", "**", "2", "print", "(", "'Memory usage after optimization is: {:.2f} MB'", ".", "format", "(", "end_mem", ")", ")", "print", "(", "'Decreased by {:.1f}%'", ".", "format", "(", "100", "*", "(", "start_mem", "-", "end_mem", ")", "/", "start_mem", ")", ")", "return", "df" ]
iterate through all the columns of a dataframe and modify the data type to reduce memory usage.
[ "iterate", "through", "all", "the", "columns", "of", "a", "dataframe", "and", "modify", "the", "data", "type", "to", "reduce", "memory", "usage", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/utils/mem.py#L177-L218
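A runnable illustration of the downcasting rules: small integers shrink to int8, in-range floats to float32, and object columns become categoricals:

    import numpy as np, pandas as pd

    df = pd.DataFrame({'a': np.arange(100),        # fits int8
                       'b': np.random.rand(100),   # fits float32
                       'c': ['x', 'y'] * 50})      # object -> category
    df = reduce_mem_usage(df)
    print(df.dtypes)                               # a: int8, b: float32, c: category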
20,882
fastai/fastai
fastai/distributed.py
_learner_distributed
def _learner_distributed(learn:Learner, cuda_id:int, cache_dir:PathOrStr='tmp'): "Put `learn` on distributed training with `cuda_id`." learn.callbacks.append(DistributedTrainer(learn, cuda_id)) learn.callbacks.append(DistributedRecorder(learn, cuda_id, cache_dir)) return learn
python
def _learner_distributed(learn:Learner, cuda_id:int, cache_dir:PathOrStr='tmp'): "Put `learn` on distributed training with `cuda_id`." learn.callbacks.append(DistributedTrainer(learn, cuda_id)) learn.callbacks.append(DistributedRecorder(learn, cuda_id, cache_dir)) return learn
[ "def", "_learner_distributed", "(", "learn", ":", "Learner", ",", "cuda_id", ":", "int", ",", "cache_dir", ":", "PathOrStr", "=", "'tmp'", ")", ":", "learn", ".", "callbacks", ".", "append", "(", "DistributedTrainer", "(", "learn", ",", "cuda_id", ")", ")", "learn", ".", "callbacks", ".", "append", "(", "DistributedRecorder", "(", "learn", ",", "cuda_id", ",", "cache_dir", ")", ")", "return", "learn" ]
Put `learn` on distributed training with `cuda_id`.
[ "Put", "learn", "on", "distributed", "training", "with", "cuda_id", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/distributed.py#L70-L74
20,883
fastai/fastai
fastai/vision/models/xresnet2.py
xresnet18
def xresnet18(pretrained=False, **kwargs): """Constructs an XResNet-18 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = XResNet(BasicBlock, [2, 2, 2, 2], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['xresnet18'])) return model
python
def xresnet18(pretrained=False, **kwargs): """Constructs an XResNet-18 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = XResNet(BasicBlock, [2, 2, 2, 2], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['xresnet18'])) return model
[ "def", "xresnet18", "(", "pretrained", "=", "False", ",", "*", "*", "kwargs", ")", ":", "model", "=", "XResNet", "(", "BasicBlock", ",", "[", "2", ",", "2", ",", "2", ",", "2", "]", ",", "*", "*", "kwargs", ")", "if", "pretrained", ":", "model", ".", "load_state_dict", "(", "model_zoo", ".", "load_url", "(", "model_urls", "[", "'xresnet18'", "]", ")", ")", "return", "model" ]
Constructs an XResNet-18 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
[ "Constructs", "a", "XResNet", "-", "18", "model", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/models/xresnet2.py#L148-L156
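A quick smoke test; `pretrained` stays False since the weight URL may not be published, and the default head is assumed to be torchvision-style with 1000 classes:

    import torch

    m = xresnet18()
    x = torch.randn(2, 3, 224, 224)
    out = m(x)              # expected shape: [2, 1000] with the default head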
20,884
fastai/fastai
fastai/vision/models/xresnet2.py
xresnet50_2
def xresnet50_2(pretrained=False, **kwargs): """Constructs an XResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = XResNet(Bottleneck, [3, 4, 6, 3], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['xresnet50'])) return model
python
def xresnet50_2(pretrained=False, **kwargs): """Constructs an XResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = XResNet(Bottleneck, [3, 4, 6, 3], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['xresnet50'])) return model
[ "def", "xresnet50_2", "(", "pretrained", "=", "False", ",", "*", "*", "kwargs", ")", ":", "model", "=", "XResNet", "(", "Bottleneck", ",", "[", "3", ",", "4", ",", "6", ",", "3", "]", ",", "*", "*", "kwargs", ")", "if", "pretrained", ":", "model", ".", "load_state_dict", "(", "model_zoo", ".", "load_url", "(", "model_urls", "[", "'xresnet50'", "]", ")", ")", "return", "model" ]
Constructs an XResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
[ "Constructs", "a", "XResNet", "-", "50", "model", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/models/xresnet2.py#L170-L178
20,885
fastai/fastai
fastai/basic_train.py
loss_batch
def loss_batch(model:nn.Module, xb:Tensor, yb:Tensor, loss_func:OptLossFunc=None, opt:OptOptimizer=None, cb_handler:Optional[CallbackHandler]=None)->Tuple[Union[Tensor,int,float,str]]: "Calculate loss and metrics for a batch, call out to callbacks as necessary." cb_handler = ifnone(cb_handler, CallbackHandler()) if not is_listy(xb): xb = [xb] if not is_listy(yb): yb = [yb] out = model(*xb) out = cb_handler.on_loss_begin(out) if not loss_func: return to_detach(out), yb[0].detach() loss = loss_func(out, *yb) if opt is not None: loss,skip_bwd = cb_handler.on_backward_begin(loss) if not skip_bwd: loss.backward() if not cb_handler.on_backward_end(): opt.step() if not cb_handler.on_step_end(): opt.zero_grad() return loss.detach().cpu()
python
def loss_batch(model:nn.Module, xb:Tensor, yb:Tensor, loss_func:OptLossFunc=None, opt:OptOptimizer=None, cb_handler:Optional[CallbackHandler]=None)->Tuple[Union[Tensor,int,float,str]]: "Calculate loss and metrics for a batch, call out to callbacks as necessary." cb_handler = ifnone(cb_handler, CallbackHandler()) if not is_listy(xb): xb = [xb] if not is_listy(yb): yb = [yb] out = model(*xb) out = cb_handler.on_loss_begin(out) if not loss_func: return to_detach(out), yb[0].detach() loss = loss_func(out, *yb) if opt is not None: loss,skip_bwd = cb_handler.on_backward_begin(loss) if not skip_bwd: loss.backward() if not cb_handler.on_backward_end(): opt.step() if not cb_handler.on_step_end(): opt.zero_grad() return loss.detach().cpu()
[ "def", "loss_batch", "(", "model", ":", "nn", ".", "Module", ",", "xb", ":", "Tensor", ",", "yb", ":", "Tensor", ",", "loss_func", ":", "OptLossFunc", "=", "None", ",", "opt", ":", "OptOptimizer", "=", "None", ",", "cb_handler", ":", "Optional", "[", "CallbackHandler", "]", "=", "None", ")", "->", "Tuple", "[", "Union", "[", "Tensor", ",", "int", ",", "float", ",", "str", "]", "]", ":", "cb_handler", "=", "ifnone", "(", "cb_handler", ",", "CallbackHandler", "(", ")", ")", "if", "not", "is_listy", "(", "xb", ")", ":", "xb", "=", "[", "xb", "]", "if", "not", "is_listy", "(", "yb", ")", ":", "yb", "=", "[", "yb", "]", "out", "=", "model", "(", "*", "xb", ")", "out", "=", "cb_handler", ".", "on_loss_begin", "(", "out", ")", "if", "not", "loss_func", ":", "return", "to_detach", "(", "out", ")", ",", "yb", "[", "0", "]", ".", "detach", "(", ")", "loss", "=", "loss_func", "(", "out", ",", "*", "yb", ")", "if", "opt", "is", "not", "None", ":", "loss", ",", "skip_bwd", "=", "cb_handler", ".", "on_backward_begin", "(", "loss", ")", "if", "not", "skip_bwd", ":", "loss", ".", "backward", "(", ")", "if", "not", "cb_handler", ".", "on_backward_end", "(", ")", ":", "opt", ".", "step", "(", ")", "if", "not", "cb_handler", ".", "on_step_end", "(", ")", ":", "opt", ".", "zero_grad", "(", ")", "return", "loss", ".", "detach", "(", ")", ".", "cpu", "(", ")" ]
Calculate loss and metrics for a batch, call out to callbacks as necessary.
[ "Calculate", "loss", "and", "metrics", "for", "a", "batch", "call", "out", "to", "callbacks", "as", "necessary", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L20-L38
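A self-contained sketch of the evaluation path (no optimizer, no callbacks): with `loss_func` given and `opt=None`, the call simply returns the detached loss; `fit()` is what supplies the optimizer and `CallbackHandler` to drive the backward/step/zero_grad branch:

    import torch, torch.nn as nn

    model = nn.Linear(10, 2)
    xb = torch.randn(4, 10)
    yb = torch.randint(0, 2, (4,))
    loss = loss_batch(model, xb, yb, loss_func=nn.CrossEntropyLoss())
    # with no loss_func it instead returns (detached outputs, detached targets)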
20,886
fastai/fastai
fastai/basic_train.py
validate
def validate(model:nn.Module, dl:DataLoader, loss_func:OptLossFunc=None, cb_handler:Optional[CallbackHandler]=None, pbar:Optional[PBar]=None, average=True, n_batch:Optional[int]=None)->Iterator[Tuple[Union[Tensor,int],...]]: "Calculate `loss_func` of `model` on `dl` in evaluation mode." model.eval() with torch.no_grad(): val_losses,nums = [],[] if cb_handler: cb_handler.set_dl(dl) for xb,yb in progress_bar(dl, parent=pbar, leave=(pbar is not None)): if cb_handler: xb, yb = cb_handler.on_batch_begin(xb, yb, train=False) val_loss = loss_batch(model, xb, yb, loss_func, cb_handler=cb_handler) val_losses.append(val_loss) if not is_listy(yb): yb = [yb] nums.append(yb[0].shape[0]) if cb_handler and cb_handler.on_batch_end(val_losses[-1]): break if n_batch and (len(nums)>=n_batch): break nums = np.array(nums, dtype=np.float32) if average: return (to_np(torch.stack(val_losses)) * nums).sum() / nums.sum() else: return val_losses
python
def validate(model:nn.Module, dl:DataLoader, loss_func:OptLossFunc=None, cb_handler:Optional[CallbackHandler]=None, pbar:Optional[PBar]=None, average=True, n_batch:Optional[int]=None)->Iterator[Tuple[Union[Tensor,int],...]]: "Calculate `loss_func` of `model` on `dl` in evaluation mode." model.eval() with torch.no_grad(): val_losses,nums = [],[] if cb_handler: cb_handler.set_dl(dl) for xb,yb in progress_bar(dl, parent=pbar, leave=(pbar is not None)): if cb_handler: xb, yb = cb_handler.on_batch_begin(xb, yb, train=False) val_loss = loss_batch(model, xb, yb, loss_func, cb_handler=cb_handler) val_losses.append(val_loss) if not is_listy(yb): yb = [yb] nums.append(yb[0].shape[0]) if cb_handler and cb_handler.on_batch_end(val_losses[-1]): break if n_batch and (len(nums)>=n_batch): break nums = np.array(nums, dtype=np.float32) if average: return (to_np(torch.stack(val_losses)) * nums).sum() / nums.sum() else: return val_losses
[ "def", "validate", "(", "model", ":", "nn", ".", "Module", ",", "dl", ":", "DataLoader", ",", "loss_func", ":", "OptLossFunc", "=", "None", ",", "cb_handler", ":", "Optional", "[", "CallbackHandler", "]", "=", "None", ",", "pbar", ":", "Optional", "[", "PBar", "]", "=", "None", ",", "average", "=", "True", ",", "n_batch", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Iterator", "[", "Tuple", "[", "Union", "[", "Tensor", ",", "int", "]", ",", "...", "]", "]", ":", "model", ".", "eval", "(", ")", "with", "torch", ".", "no_grad", "(", ")", ":", "val_losses", ",", "nums", "=", "[", "]", ",", "[", "]", "if", "cb_handler", ":", "cb_handler", ".", "set_dl", "(", "dl", ")", "for", "xb", ",", "yb", "in", "progress_bar", "(", "dl", ",", "parent", "=", "pbar", ",", "leave", "=", "(", "pbar", "is", "not", "None", ")", ")", ":", "if", "cb_handler", ":", "xb", ",", "yb", "=", "cb_handler", ".", "on_batch_begin", "(", "xb", ",", "yb", ",", "train", "=", "False", ")", "val_loss", "=", "loss_batch", "(", "model", ",", "xb", ",", "yb", ",", "loss_func", ",", "cb_handler", "=", "cb_handler", ")", "val_losses", ".", "append", "(", "val_loss", ")", "if", "not", "is_listy", "(", "yb", ")", ":", "yb", "=", "[", "yb", "]", "nums", ".", "append", "(", "yb", "[", "0", "]", ".", "shape", "[", "0", "]", ")", "if", "cb_handler", "and", "cb_handler", ".", "on_batch_end", "(", "val_losses", "[", "-", "1", "]", ")", ":", "break", "if", "n_batch", "and", "(", "len", "(", "nums", ")", ">=", "n_batch", ")", ":", "break", "nums", "=", "np", ".", "array", "(", "nums", ",", "dtype", "=", "np", ".", "float32", ")", "if", "average", ":", "return", "(", "to_np", "(", "torch", ".", "stack", "(", "val_losses", ")", ")", "*", "nums", ")", ".", "sum", "(", ")", "/", "nums", ".", "sum", "(", ")", "else", ":", "return", "val_losses" ]
Calculate `loss_func` of `model` on `dl` in evaluation mode.
[ "Calculate", "loss_func", "of", "model", "on", "dl", "in", "evaluation", "mode", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L50-L67
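A hedged, runnable example with a plain PyTorch DataLoader; with `average=True` the result is the sample-weighted mean loss over all batches:

    import torch, torch.nn as nn
    from torch.utils.data import TensorDataset, DataLoader

    model = nn.Linear(10, 2)
    ds = TensorDataset(torch.randn(64, 10), torch.randint(0, 2, (64,)))
    dl = DataLoader(ds, batch_size=16)
    avg_loss = validate(model, dl, loss_func=nn.CrossEntropyLoss())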
20,887
fastai/fastai
fastai/basic_train.py
train_epoch
def train_epoch(model:nn.Module, dl:DataLoader, opt:optim.Optimizer, loss_func:LossFunction)->None: "Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`." model.train() for xb,yb in dl: loss = loss_func(model(xb), yb) loss.backward() opt.step() opt.zero_grad()
python
def train_epoch(model:nn.Module, dl:DataLoader, opt:optim.Optimizer, loss_func:LossFunction)->None: "Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`." model.train() for xb,yb in dl: loss = loss_func(model(xb), yb) loss.backward() opt.step() opt.zero_grad()
[ "def", "train_epoch", "(", "model", ":", "nn", ".", "Module", ",", "dl", ":", "DataLoader", ",", "opt", ":", "optim", ".", "Optimizer", ",", "loss_func", ":", "LossFunction", ")", "->", "None", ":", "model", ".", "train", "(", ")", "for", "xb", ",", "yb", "in", "dl", ":", "loss", "=", "loss_func", "(", "model", "(", "xb", ")", ",", "yb", ")", "loss", ".", "backward", "(", ")", "opt", ".", "step", "(", ")", "opt", ".", "zero_grad", "(", ")" ]
Simple training of `model` for 1 epoch of `dl` using optim `opt` and loss function `loss_func`.
[ "Simple", "training", "of", "model", "for", "1", "epoch", "of", "dl", "using", "optim", "opt", "and", "loss", "function", "loss_func", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L69-L76
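Because this helper uses no callbacks at all, it runs against plain PyTorch objects; a minimal end-to-end example:

    import torch, torch.nn as nn
    from torch.utils.data import TensorDataset, DataLoader

    model = nn.Linear(10, 2)
    opt = torch.optim.SGD(model.parameters(), lr=0.1)
    dl = DataLoader(TensorDataset(torch.randn(64, 10), torch.randint(0, 2, (64,))),
                    batch_size=16)
    train_epoch(model, dl, opt, nn.CrossEntropyLoss())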
20,888
fastai/fastai
fastai/basic_train.py
fit
def fit(epochs:int, learn:BasicLearner, callbacks:Optional[CallbackList]=None, metrics:OptMetrics=None)->None: "Fit the `model` on `data` and learn using `loss_func` and `opt`." assert len(learn.data.train_dl) != 0, f"""Your training dataloader is empty, can't train a model. Use a smaller batch size (batch size={learn.data.train_dl.batch_size} for {len(learn.data.train_dl.dataset)} elements).""" cb_handler = CallbackHandler(callbacks, metrics) pbar = master_bar(range(epochs)) cb_handler.on_train_begin(epochs, pbar=pbar, metrics=metrics) exception=False try: for epoch in pbar: learn.model.train() cb_handler.set_dl(learn.data.train_dl) cb_handler.on_epoch_begin() for xb,yb in progress_bar(learn.data.train_dl, parent=pbar): xb, yb = cb_handler.on_batch_begin(xb, yb) loss = loss_batch(learn.model, xb, yb, learn.loss_func, learn.opt, cb_handler) if cb_handler.on_batch_end(loss): break if not cb_handler.skip_validate and not learn.data.empty_val: val_loss = validate(learn.model, learn.data.valid_dl, loss_func=learn.loss_func, cb_handler=cb_handler, pbar=pbar) else: val_loss=None if cb_handler.on_epoch_end(val_loss): break except Exception as e: exception = e raise finally: cb_handler.on_train_end(exception)
python
def fit(epochs:int, learn:BasicLearner, callbacks:Optional[CallbackList]=None, metrics:OptMetrics=None)->None: "Fit the `model` on `data` and learn using `loss_func` and `opt`." assert len(learn.data.train_dl) != 0, f"""Your training dataloader is empty, can't train a model. Use a smaller batch size (batch size={learn.data.train_dl.batch_size} for {len(learn.data.train_dl.dataset)} elements).""" cb_handler = CallbackHandler(callbacks, metrics) pbar = master_bar(range(epochs)) cb_handler.on_train_begin(epochs, pbar=pbar, metrics=metrics) exception=False try: for epoch in pbar: learn.model.train() cb_handler.set_dl(learn.data.train_dl) cb_handler.on_epoch_begin() for xb,yb in progress_bar(learn.data.train_dl, parent=pbar): xb, yb = cb_handler.on_batch_begin(xb, yb) loss = loss_batch(learn.model, xb, yb, learn.loss_func, learn.opt, cb_handler) if cb_handler.on_batch_end(loss): break if not cb_handler.skip_validate and not learn.data.empty_val: val_loss = validate(learn.model, learn.data.valid_dl, loss_func=learn.loss_func, cb_handler=cb_handler, pbar=pbar) else: val_loss=None if cb_handler.on_epoch_end(val_loss): break except Exception as e: exception = e raise finally: cb_handler.on_train_end(exception)
[ "def", "fit", "(", "epochs", ":", "int", ",", "learn", ":", "BasicLearner", ",", "callbacks", ":", "Optional", "[", "CallbackList", "]", "=", "None", ",", "metrics", ":", "OptMetrics", "=", "None", ")", "->", "None", ":", "assert", "len", "(", "learn", ".", "data", ".", "train_dl", ")", "!=", "0", ",", "f\"\"\"Your training dataloader is empty, can't train a model.\n Use a smaller batch size (batch size={learn.data.train_dl.batch_size} for {len(learn.data.train_dl.dataset)} elements).\"\"\"", "cb_handler", "=", "CallbackHandler", "(", "callbacks", ",", "metrics", ")", "pbar", "=", "master_bar", "(", "range", "(", "epochs", ")", ")", "cb_handler", ".", "on_train_begin", "(", "epochs", ",", "pbar", "=", "pbar", ",", "metrics", "=", "metrics", ")", "exception", "=", "False", "try", ":", "for", "epoch", "in", "pbar", ":", "learn", ".", "model", ".", "train", "(", ")", "cb_handler", ".", "set_dl", "(", "learn", ".", "data", ".", "train_dl", ")", "cb_handler", ".", "on_epoch_begin", "(", ")", "for", "xb", ",", "yb", "in", "progress_bar", "(", "learn", ".", "data", ".", "train_dl", ",", "parent", "=", "pbar", ")", ":", "xb", ",", "yb", "=", "cb_handler", ".", "on_batch_begin", "(", "xb", ",", "yb", ")", "loss", "=", "loss_batch", "(", "learn", ".", "model", ",", "xb", ",", "yb", ",", "learn", ".", "loss_func", ",", "learn", ".", "opt", ",", "cb_handler", ")", "if", "cb_handler", ".", "on_batch_end", "(", "loss", ")", ":", "break", "if", "not", "cb_handler", ".", "skip_validate", "and", "not", "learn", ".", "data", ".", "empty_val", ":", "val_loss", "=", "validate", "(", "learn", ".", "model", ",", "learn", ".", "data", ".", "valid_dl", ",", "loss_func", "=", "learn", ".", "loss_func", ",", "cb_handler", "=", "cb_handler", ",", "pbar", "=", "pbar", ")", "else", ":", "val_loss", "=", "None", "if", "cb_handler", ".", "on_epoch_end", "(", "val_loss", ")", ":", "break", "except", "Exception", "as", "e", ":", "exception", "=", "e", "raise", "finally", ":", "cb_handler", ".", "on_train_end", "(", "exception", ")" ]
Fit the `model` on `data` and learn using `loss_func` and `opt`.
[ "Fit", "the", "model", "on", "data", "and", "learn", "using", "loss_func", "and", "opt", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L85-L112
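In practice this loop is reached through `Learner.fit`, which first builds `learn.opt` and then delegates here; a hedged sketch assuming a `Learner` named `learn` built elsewhere:

    learn.fit(3, lr=1e-3)    # creates learn.opt, then calls fit(3, learn, ...)
    # roughly equivalent low-level call, once learn.opt exists:
    # fit(3, learn, callbacks=learn.callbacks, metrics=learn.metrics)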
20,889
fastai/fastai
fastai/basic_train.py
Recorder.on_train_begin
def on_train_begin(self, pbar:PBar, metrics_names:Collection[str], **kwargs:Any)->None: "Initialize recording status at beginning of training." self.pbar = pbar self.names = ['epoch', 'train_loss'] if self.no_val else ['epoch', 'train_loss', 'valid_loss'] self.metrics_names = metrics_names self.names += self.metrics_names if hasattr(self, '_added_met_names'): self.names += self._added_met_names if self.add_time: self.names.append('time') if not self.silent: self.pbar.write(self.names, table=True) self.losses,self.val_losses,self.lrs,self.moms,self.metrics,self.nb_batches = [],[],[],[],[],[]
python
def on_train_begin(self, pbar:PBar, metrics_names:Collection[str], **kwargs:Any)->None: "Initialize recording status at beginning of training." self.pbar = pbar self.names = ['epoch', 'train_loss'] if self.no_val else ['epoch', 'train_loss', 'valid_loss'] self.metrics_names = metrics_names self.names += self.metrics_names if hasattr(self, '_added_met_names'): self.names += self._added_met_names if self.add_time: self.names.append('time') if not self.silent: self.pbar.write(self.names, table=True) self.losses,self.val_losses,self.lrs,self.moms,self.metrics,self.nb_batches = [],[],[],[],[],[]
[ "def", "on_train_begin", "(", "self", ",", "pbar", ":", "PBar", ",", "metrics_names", ":", "Collection", "[", "str", "]", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "pbar", "=", "pbar", "self", ".", "names", "=", "[", "'epoch'", ",", "'train_loss'", "]", "if", "self", ".", "no_val", "else", "[", "'epoch'", ",", "'train_loss'", ",", "'valid_loss'", "]", "self", ".", "metrics_names", "=", "metrics_names", "self", ".", "names", "+=", "self", ".", "metrics_names", "if", "hasattr", "(", "self", ",", "'_added_met_names'", ")", ":", "self", ".", "names", "+=", "self", ".", "_added_met_names", "if", "self", ".", "add_time", ":", "self", ".", "names", ".", "append", "(", "'time'", ")", "if", "not", "self", ".", "silent", ":", "self", ".", "pbar", ".", "write", "(", "self", ".", "names", ",", "table", "=", "True", ")", "self", ".", "losses", ",", "self", ".", "val_losses", ",", "self", ".", "lrs", ",", "self", ".", "moms", ",", "self", ".", "metrics", ",", "self", ".", "nb_batches", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]" ]
Initialize recording status at beginning of training.
[ "Initialize", "recording", "status", "at", "beginning", "of", "training", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L447-L456
20,890
fastai/fastai
fastai/basic_train.py
Recorder.on_batch_begin
def on_batch_begin(self, train, **kwargs:Any)->None: "Record learning rate and momentum at beginning of batch." if train: self.lrs.append(self.opt.lr) self.moms.append(self.opt.mom)
python
def on_batch_begin(self, train, **kwargs:Any)->None: "Record learning rate and momentum at beginning of batch." if train: self.lrs.append(self.opt.lr) self.moms.append(self.opt.mom)
[ "def", "on_batch_begin", "(", "self", ",", "train", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "train", ":", "self", ".", "lrs", ".", "append", "(", "self", ".", "opt", ".", "lr", ")", "self", ".", "moms", ".", "append", "(", "self", ".", "opt", ".", "mom", ")" ]
Record learning rate and momentum at beginning of batch.
[ "Record", "learning", "rate", "and", "momentum", "at", "beginning", "of", "batch", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L461-L465
20,891
fastai/fastai
fastai/basic_train.py
Recorder.on_backward_begin
def on_backward_begin(self, smooth_loss:Tensor, **kwargs:Any)->None: "Record the loss before any other callback has a chance to modify it." self.losses.append(smooth_loss) if self.pbar is not None and hasattr(self.pbar,'child'): self.pbar.child.comment = f'{smooth_loss:.4f}'
python
def on_backward_begin(self, smooth_loss:Tensor, **kwargs:Any)->None: "Record the loss before any other callback has a chance to modify it." self.losses.append(smooth_loss) if self.pbar is not None and hasattr(self.pbar,'child'): self.pbar.child.comment = f'{smooth_loss:.4f}'
[ "def", "on_backward_begin", "(", "self", ",", "smooth_loss", ":", "Tensor", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "losses", ".", "append", "(", "smooth_loss", ")", "if", "self", ".", "pbar", "is", "not", "None", "and", "hasattr", "(", "self", ".", "pbar", ",", "'child'", ")", ":", "self", ".", "pbar", ".", "child", ".", "comment", "=", "f'{smooth_loss:.4f}'" ]
Record the loss before any other callback has a chance to modify it.
[ "Record", "the", "loss", "before", "any", "other", "callback", "has", "a", "chance", "to", "modify", "it", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L467-L471
20,892
fastai/fastai
fastai/basic_train.py
Recorder.format_stats
def format_stats(self, stats:TensorOrNumList)->None: "Format stats before printing." str_stats = [] for name,stat in zip(self.names,stats): str_stats.append('#na#' if stat is None else str(stat) if isinstance(stat, int) else f'{stat:.6f}') if self.add_time: str_stats.append(format_time(time() - self.start_epoch)) if not self.silent: self.pbar.write(str_stats, table=True)
python
def format_stats(self, stats:TensorOrNumList)->None: "Format stats before printing." str_stats = [] for name,stat in zip(self.names,stats): str_stats.append('#na#' if stat is None else str(stat) if isinstance(stat, int) else f'{stat:.6f}') if self.add_time: str_stats.append(format_time(time() - self.start_epoch)) if not self.silent: self.pbar.write(str_stats, table=True)
[ "def", "format_stats", "(", "self", ",", "stats", ":", "TensorOrNumList", ")", "->", "None", ":", "str_stats", "=", "[", "]", "for", "name", ",", "stat", "in", "zip", "(", "self", ".", "names", ",", "stats", ")", ":", "str_stats", ".", "append", "(", "'#na#'", "if", "stat", "is", "None", "else", "str", "(", "stat", ")", "if", "isinstance", "(", "stat", ",", "int", ")", "else", "f'{stat:.6f}'", ")", "if", "self", ".", "add_time", ":", "str_stats", ".", "append", "(", "format_time", "(", "time", "(", ")", "-", "self", ".", "start_epoch", ")", ")", "if", "not", "self", ".", "silent", ":", "self", ".", "pbar", ".", "write", "(", "str_stats", ",", "table", "=", "True", ")" ]
Format stats before printing.
[ "Format", "stats", "before", "printing", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L482-L488
20,893
fastai/fastai
fastai/basic_train.py
Recorder.add_metric_names
def add_metric_names(self, names): "Add `names` to the inner metric names." if hasattr(self, '_added_met_names'): self._added_met_names += names else: self._added_met_names = names
python
def add_metric_names(self, names): "Add `names` to the inner metric names." if hasattr(self, '_added_met_names'): self._added_met_names += names else: self._added_met_names = names
[ "def", "add_metric_names", "(", "self", ",", "names", ")", ":", "if", "hasattr", "(", "self", ",", "'_added_met_names'", ")", ":", "self", ".", "_added_met_names", "+=", "names", "else", ":", "self", ".", "_added_met_names", "=", "names" ]
Add `names` to the inner metric names.
[ "Add", "names", "to", "the", "inner", "metric", "names", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L490-L493
20,894
fastai/fastai
fastai/basic_train.py
Recorder.plot_lr
def plot_lr(self, show_moms=False, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot learning rate, `show_moms` to include momentum." lrs = self._split_list(self.lrs, skip_start, skip_end) iterations = self._split_list(range_of(self.lrs), skip_start, skip_end) if show_moms: moms = self._split_list(self.moms, skip_start, skip_end) fig, axs = plt.subplots(1,2, figsize=(12,4)) axs[0].plot(iterations, lrs) axs[0].set_xlabel('Iterations') axs[0].set_ylabel('Learning Rate') axs[1].plot(iterations, moms) axs[1].set_xlabel('Iterations') axs[1].set_ylabel('Momentum') else: fig, ax = plt.subplots() ax.plot(iterations, lrs) ax.set_xlabel('Iterations') ax.set_ylabel('Learning Rate') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
python
def plot_lr(self, show_moms=False, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot learning rate, `show_moms` to include momentum." lrs = self._split_list(self.lrs, skip_start, skip_end) iterations = self._split_list(range_of(self.lrs), skip_start, skip_end) if show_moms: moms = self._split_list(self.moms, skip_start, skip_end) fig, axs = plt.subplots(1,2, figsize=(12,4)) axs[0].plot(iterations, lrs) axs[0].set_xlabel('Iterations') axs[0].set_ylabel('Learning Rate') axs[1].plot(iterations, moms) axs[1].set_xlabel('Iterations') axs[1].set_ylabel('Momentum') else: fig, ax = plt.subplots() ax.plot(iterations, lrs) ax.set_xlabel('Iterations') ax.set_ylabel('Learning Rate') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "def", "plot_lr", "(", "self", ",", "show_moms", "=", "False", ",", "skip_start", ":", "int", "=", "0", ",", "skip_end", ":", "int", "=", "0", ",", "return_fig", ":", "bool", "=", "None", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "lrs", "=", "self", ".", "_split_list", "(", "self", ".", "lrs", ",", "skip_start", ",", "skip_end", ")", "iterations", "=", "self", ".", "_split_list", "(", "range_of", "(", "self", ".", "lrs", ")", ",", "skip_start", ",", "skip_end", ")", "if", "show_moms", ":", "moms", "=", "self", ".", "_split_list", "(", "self", ".", "moms", ",", "skip_start", ",", "skip_end", ")", "fig", ",", "axs", "=", "plt", ".", "subplots", "(", "1", ",", "2", ",", "figsize", "=", "(", "12", ",", "4", ")", ")", "axs", "[", "0", "]", ".", "plot", "(", "iterations", ",", "lrs", ")", "axs", "[", "0", "]", ".", "set_xlabel", "(", "'Iterations'", ")", "axs", "[", "0", "]", ".", "set_ylabel", "(", "'Learning Rate'", ")", "axs", "[", "1", "]", ".", "plot", "(", "iterations", ",", "moms", ")", "axs", "[", "1", "]", ".", "set_xlabel", "(", "'Iterations'", ")", "axs", "[", "1", "]", ".", "set_ylabel", "(", "'Momentum'", ")", "else", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "ax", ".", "plot", "(", "iterations", ",", "lrs", ")", "ax", ".", "set_xlabel", "(", "'Iterations'", ")", "ax", ".", "set_ylabel", "(", "'Learning Rate'", ")", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
Plot learning rate, `show_moms` to include momentum.
[ "Plot", "learning", "rate", "show_moms", "to", "include", "momentum", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L495-L514
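Typical usage after a training run, assuming a trained `Learner` named `learn` (its `Recorder` is attached as `learn.recorder`):

    learn.recorder.plot_lr(show_moms=True)   # LR schedule and momentum side by side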
20,895
fastai/fastai
fastai/basic_train.py
Recorder.plot
def plot(self, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None, **kwargs)->Optional[plt.Figure]: "Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient" lrs = self._split_list(self.lrs, skip_start, skip_end) losses = self._split_list(self.losses, skip_start, skip_end) losses = [x.item() for x in losses] if 'k' in kwargs: losses = self.smoothen_by_spline(lrs, losses, **kwargs) fig, ax = plt.subplots(1,1) ax.plot(lrs, losses) ax.set_ylabel("Loss") ax.set_xlabel("Learning Rate") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print("Failed to compute the gradients, there might not be enough points.") return print(f"Min numerical gradient: {lrs[mg]:.2E}") ax.plot(lrs[mg],losses[mg],markersize=10,marker='o',color='red') self.min_grad_lr = lrs[mg] if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
python
def plot(self, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None, **kwargs)->Optional[plt.Figure]: "Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient" lrs = self._split_list(self.lrs, skip_start, skip_end) losses = self._split_list(self.losses, skip_start, skip_end) losses = [x.item() for x in losses] if 'k' in kwargs: losses = self.smoothen_by_spline(lrs, losses, **kwargs) fig, ax = plt.subplots(1,1) ax.plot(lrs, losses) ax.set_ylabel("Loss") ax.set_xlabel("Learning Rate") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print("Failed to compute the gradients, there might not be enough points.") return print(f"Min numerical gradient: {lrs[mg]:.2E}") ax.plot(lrs[mg],losses[mg],markersize=10,marker='o',color='red') self.min_grad_lr = lrs[mg] if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "def", "plot", "(", "self", ",", "skip_start", ":", "int", "=", "10", ",", "skip_end", ":", "int", "=", "5", ",", "suggestion", ":", "bool", "=", "False", ",", "return_fig", ":", "bool", "=", "None", ",", "*", "*", "kwargs", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "lrs", "=", "self", ".", "_split_list", "(", "self", ".", "lrs", ",", "skip_start", ",", "skip_end", ")", "losses", "=", "self", ".", "_split_list", "(", "self", ".", "losses", ",", "skip_start", ",", "skip_end", ")", "losses", "=", "[", "x", ".", "item", "(", ")", "for", "x", "in", "losses", "]", "if", "'k'", "in", "kwargs", ":", "losses", "=", "self", ".", "smoothen_by_spline", "(", "lrs", ",", "losses", ",", "*", "*", "kwargs", ")", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "1", ",", "1", ")", "ax", ".", "plot", "(", "lrs", ",", "losses", ")", "ax", ".", "set_ylabel", "(", "\"Loss\"", ")", "ax", ".", "set_xlabel", "(", "\"Learning Rate\"", ")", "ax", ".", "set_xscale", "(", "'log'", ")", "ax", ".", "xaxis", ".", "set_major_formatter", "(", "plt", ".", "FormatStrFormatter", "(", "'%.0e'", ")", ")", "if", "suggestion", ":", "try", ":", "mg", "=", "(", "np", ".", "gradient", "(", "np", ".", "array", "(", "losses", ")", ")", ")", ".", "argmin", "(", ")", "except", ":", "print", "(", "\"Failed to compute the gradients, there might not be enough points.\"", ")", "return", "print", "(", "f\"Min numerical gradient: {lrs[mg]:.2E}\"", ")", "ax", ".", "plot", "(", "lrs", "[", "mg", "]", ",", "losses", "[", "mg", "]", ",", "markersize", "=", "10", ",", "marker", "=", "'o'", ",", "color", "=", "'red'", ")", "self", ".", "min_grad_lr", "=", "lrs", "[", "mg", "]", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
Plot learning rate and losses, trimmed between `skip_start` and `skip_end`. Optionally plot and return min gradient
[ "Plot", "learning", "rate", "and", "losses", "trimmed", "between", "skip_start", "and", "skip_end", ".", "Optionally", "plot", "and", "return", "min", "gradient" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L523-L545
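The usual pairing is with the LR finder; `suggestion=True` marks the point of steepest loss decrease and stores it on the recorder (sketch assumes a `Learner` named `learn`):

    learn.lr_find()
    learn.recorder.plot(suggestion=True)
    lr = learn.recorder.min_grad_lr          # set as a side effect of suggestion=True
    learn.fit_one_cycle(3, lr)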
20,896
fastai/fastai
fastai/basic_train.py
Recorder.plot_losses
def plot_losses(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot training and validation losses." fig, ax = plt.subplots(1,1) losses = self._split_list(self.losses, skip_start, skip_end) iterations = self._split_list(range_of(self.losses), skip_start, skip_end) ax.plot(iterations, losses, label='Train') val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) val_losses = self._split_list_val(self.val_losses, skip_start, skip_end) ax.plot(val_iter, val_losses, label='Validation') ax.set_ylabel('Loss') ax.set_xlabel('Batches processed') ax.legend() if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
python
def plot_losses(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot training and validation losses." fig, ax = plt.subplots(1,1) losses = self._split_list(self.losses, skip_start, skip_end) iterations = self._split_list(range_of(self.losses), skip_start, skip_end) ax.plot(iterations, losses, label='Train') val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) val_losses = self._split_list_val(self.val_losses, skip_start, skip_end) ax.plot(val_iter, val_losses, label='Validation') ax.set_ylabel('Loss') ax.set_xlabel('Batches processed') ax.legend() if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "def", "plot_losses", "(", "self", ",", "skip_start", ":", "int", "=", "0", ",", "skip_end", ":", "int", "=", "0", ",", "return_fig", ":", "bool", "=", "None", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "1", ",", "1", ")", "losses", "=", "self", ".", "_split_list", "(", "self", ".", "losses", ",", "skip_start", ",", "skip_end", ")", "iterations", "=", "self", ".", "_split_list", "(", "range_of", "(", "self", ".", "losses", ")", ",", "skip_start", ",", "skip_end", ")", "ax", ".", "plot", "(", "iterations", ",", "losses", ",", "label", "=", "'Train'", ")", "val_iter", "=", "self", ".", "_split_list_val", "(", "np", ".", "cumsum", "(", "self", ".", "nb_batches", ")", ",", "skip_start", ",", "skip_end", ")", "val_losses", "=", "self", ".", "_split_list_val", "(", "self", ".", "val_losses", ",", "skip_start", ",", "skip_end", ")", "ax", ".", "plot", "(", "val_iter", ",", "val_losses", ",", "label", "=", "'Validation'", ")", "ax", ".", "set_ylabel", "(", "'Loss'", ")", "ax", ".", "set_xlabel", "(", "'Batches processed'", ")", "ax", ".", "legend", "(", ")", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
Plot training and validation losses.
[ "Plot", "training", "and", "validation", "losses", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L547-L560
20,897
fastai/fastai
fastai/basic_train.py
Recorder.plot_metrics
def plot_metrics(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot metrics collected during training." assert len(self.metrics) != 0, "There are no metrics to plot." fig, axes = plt.subplots(len(self.metrics[0]),1,figsize=(6, 4*len(self.metrics[0]))) val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) axes = axes.flatten() if len(self.metrics[0]) != 1 else [axes] for i, ax in enumerate(axes): values = [met[i] for met in self.metrics] values = self._split_list_val(values, skip_start, skip_end) ax.plot(val_iter, values) ax.set_ylabel(str(self.metrics_names[i])) ax.set_xlabel('Batches processed') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
python
def plot_metrics(self, skip_start:int=0, skip_end:int=0, return_fig:bool=None)->Optional[plt.Figure]: "Plot metrics collected during training." assert len(self.metrics) != 0, "There are no metrics to plot." fig, axes = plt.subplots(len(self.metrics[0]),1,figsize=(6, 4*len(self.metrics[0]))) val_iter = self._split_list_val(np.cumsum(self.nb_batches), skip_start, skip_end) axes = axes.flatten() if len(self.metrics[0]) != 1 else [axes] for i, ax in enumerate(axes): values = [met[i] for met in self.metrics] values = self._split_list_val(values, skip_start, skip_end) ax.plot(val_iter, values) ax.set_ylabel(str(self.metrics_names[i])) ax.set_xlabel('Batches processed') if ifnone(return_fig, defaults.return_fig): return fig if not IN_NOTEBOOK: plot_sixel(fig)
[ "def", "plot_metrics", "(", "self", ",", "skip_start", ":", "int", "=", "0", ",", "skip_end", ":", "int", "=", "0", ",", "return_fig", ":", "bool", "=", "None", ")", "->", "Optional", "[", "plt", ".", "Figure", "]", ":", "assert", "len", "(", "self", ".", "metrics", ")", "!=", "0", ",", "\"There are no metrics to plot.\"", "fig", ",", "axes", "=", "plt", ".", "subplots", "(", "len", "(", "self", ".", "metrics", "[", "0", "]", ")", ",", "1", ",", "figsize", "=", "(", "6", ",", "4", "*", "len", "(", "self", ".", "metrics", "[", "0", "]", ")", ")", ")", "val_iter", "=", "self", ".", "_split_list_val", "(", "np", ".", "cumsum", "(", "self", ".", "nb_batches", ")", ",", "skip_start", ",", "skip_end", ")", "axes", "=", "axes", ".", "flatten", "(", ")", "if", "len", "(", "self", ".", "metrics", "[", "0", "]", ")", "!=", "1", "else", "[", "axes", "]", "for", "i", ",", "ax", "in", "enumerate", "(", "axes", ")", ":", "values", "=", "[", "met", "[", "i", "]", "for", "met", "in", "self", ".", "metrics", "]", "values", "=", "self", ".", "_split_list_val", "(", "values", ",", "skip_start", ",", "skip_end", ")", "ax", ".", "plot", "(", "val_iter", ",", "values", ")", "ax", ".", "set_ylabel", "(", "str", "(", "self", ".", "metrics_names", "[", "i", "]", ")", ")", "ax", ".", "set_xlabel", "(", "'Batches processed'", ")", "if", "ifnone", "(", "return_fig", ",", "defaults", ".", "return_fig", ")", ":", "return", "fig", "if", "not", "IN_NOTEBOOK", ":", "plot_sixel", "(", "fig", ")" ]
Plot metrics collected during training.
[ "Plot", "metrics", "collected", "during", "training", "." ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/basic_train.py#L562-L575
20,898
fastai/fastai
fastai/script.py
call_parse
def call_parse(func): "Decorator to create a simple CLI from `func` using `anno_parser`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == "__main__": args = anno_parser(func).parse_args() func(**args.__dict__) else: return func
python
def call_parse(func): "Decorator to create a simple CLI from `func` using `anno_parser`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == "__main__": args = anno_parser(func).parse_args() func(**args.__dict__) else: return func
[ "def", "call_parse", "(", "func", ")", ":", "name", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", ".", "f_globals", "[", "'__name__'", "]", "if", "name", "==", "\"__main__\"", ":", "args", "=", "anno_parser", "(", "func", ")", ".", "parse_args", "(", ")", "func", "(", "*", "*", "args", ".", "__dict__", ")", "else", ":", "return", "func" ]
Decorator to create a simple CLI from `func` using `anno_parser`
[ "Decorator", "to", "create", "a", "simple", "CLI", "from", "func", "using", "anno_parser" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/script.py#L35-L41
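A minimal script sketch using `Param` from `fastai.script` (names illustrative): run directly, the decorator parses `sys.argv`; imported, it returns the function unchanged:

    from fastai.script import call_parse, Param

    @call_parse
    def main(msg: Param("message to print", str) = "hello",
             n: Param("number of repetitions", int) = 1):
        "Print `msg`, `n` times."
        for _ in range(n): print(msg)

    # e.g.:  python greet.py --msg hi --n 3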
20,899
fastai/fastai
fastai/script.py
call_plac
def call_plac(f): "Decorator to create a simple CLI from `func` using `plac`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == '__main__': import plac res = plac.call(f) if callable(res): res() else: return f
python
def call_plac(f): "Decorator to create a simple CLI from `func` using `plac`" name = inspect.currentframe().f_back.f_globals['__name__'] if name == '__main__': import plac res = plac.call(f) if callable(res): res() else: return f
[ "def", "call_plac", "(", "f", ")", ":", "name", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", ".", "f_globals", "[", "'__name__'", "]", "if", "name", "==", "'__main__'", ":", "import", "plac", "res", "=", "plac", ".", "call", "(", "f", ")", "if", "callable", "(", "res", ")", ":", "res", "(", ")", "else", ":", "return", "f" ]
Decorator to create a simple CLI from `func` using `plac`
[ "Decorator", "to", "create", "a", "simple", "CLI", "from", "func", "using", "plac" ]
9fb84a5cdefe5a766cdb792b8f5d8971737b7e67
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/script.py#L43-L50
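The `plac` counterpart, using plac's (help, kind, abbreviation) annotation tuples; again the function only becomes a CLI when the script is executed directly:

    from fastai.script import call_plac

    @call_plac
    def main(name: ("who to greet", 'positional'),
             shout: ("uppercase the output", 'flag', 's') = False):
        msg = f"hello, {name}"
        print(msg.upper() if shout else msg)

    # e.g.:  python greet.py world -s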