| idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars) |
|---|---|---|
7,700
|
def data_from_dataset ( dataset , X_indexing = None , y_indexing = None ) : X , y = _none , _none if isinstance ( dataset , Subset ) : X , y = data_from_dataset ( dataset . dataset , X_indexing = X_indexing , y_indexing = y_indexing ) X = multi_indexing ( X , dataset . indices , indexing = X_indexing ) y = multi_indexing ( y , dataset . indices , indexing = y_indexing ) elif hasattr ( dataset , 'X' ) and hasattr ( dataset , 'y' ) : X , y = dataset . X , dataset . y if ( X is _none ) or ( y is _none ) : raise AttributeError ( "Could not access X and y from dataset." ) return X , y
|
Try to access the X and y attributes from the dataset.
|
7,701
|
def is_skorch_dataset ( ds ) : from skorch . dataset import Dataset if isinstance ( ds , Subset ) : return is_skorch_dataset ( ds . dataset ) return isinstance ( ds , Dataset )
|
Checks if the supplied dataset is an instance of skorch.dataset.Dataset, even when it is nested inside torch.utils.data.Subset.
|
7,702
|
def open_file_like ( f , mode ) : new_fd = isinstance ( f , ( str , pathlib . Path ) ) if new_fd : f = open ( f , mode ) try : yield f finally : if new_fd : f . close ( )
|
Wrapper for opening a file
|
7,703
|
def cache_net_infer ( net , use_caching , y_preds ) : if not use_caching : yield net return y_preds = iter ( y_preds ) net . infer = lambda * a , ** kw : next ( y_preds ) try : yield net finally : del net . __dict__ [ 'infer' ]
|
Caching context for a skorch.NeuralNet instance. Returns a modified version of the net whose infer method will subsequently return cached predictions. Leaving the context will undo the overwrite of the infer method.
|
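A minimal usage sketch for the caching context above, assuming it is used as a context manager (e.g. decorated with contextlib.contextmanager); `net`, `X`, `y`, and `scorer` are placeholder names for an already fitted skorch net, its data, and a scoring callable:

```python
# Hypothetical sketch: score repeatedly without re-running inference.
y_preds = list(net.forward_iter(X))  # compute and cache predictions once
with cache_net_infer(net, use_caching=True, y_preds=y_preds) as cached_net:
    # inside the context, cached_net.infer() yields the cached batches in order
    value = scorer(cached_net, X, y)
```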
7,704
|
def convert_sklearn_metric_function ( scoring ) : if callable ( scoring ) : module = getattr ( scoring , '__module__' , None ) if ( hasattr ( module , 'startswith' ) and module . startswith ( 'sklearn.metrics.' ) and not module . startswith ( 'sklearn.metrics.scorer' ) and not module . startswith ( 'sklearn.metrics.tests.' ) ) : return make_scorer ( scoring ) return scoring
|
If scoring is a sklearn metric function, convert it to a sklearn scorer and return it. Otherwise, return scoring unchanged.
|
7,705
|
def _get_name ( self ) : if self . name is not None : return self . name if self . scoring_ is None : return 'score' if isinstance ( self . scoring_ , str ) : return self . scoring_ if isinstance ( self . scoring_ , partial ) : return self . scoring_ . func . __name__ if isinstance ( self . scoring_ , _BaseScorer ) : return self . scoring_ . _score_func . __name__ return self . scoring_ . __name__
|
Find name of scoring function .
|
7,706
|
def _scoring ( self , net , X_test , y_test ) : scorer = check_scoring ( net , self . scoring_ ) scores = _score ( estimator = net , X_test = X_test , y_test = y_test , scorer = scorer , is_multimetric = False , ) return scores
|
Resolve scoring and apply it to the data. Use cached predictions instead of running inference again, if available.
|
7,707
|
def _record_score ( self , history , current_score ) : history . record ( self . name_ , current_score ) is_best = self . _is_best_score ( current_score ) if is_best is None : return history . record ( self . name_ + '_best' , bool ( is_best ) ) if is_best : self . best_score_ = current_score
|
Record the current score and, if applicable, whether it is the best score yet.
|
7,708
|
def calcuate_bboxes ( im_shape , patch_size ) : h , w = im_shape ph , pw = patch_size steps_h = chain ( range ( 0 , h - ph , ph ) , [ h - ph ] ) steps_w = chain ( range ( 0 , w - pw , pw ) , [ w - pw ] ) return product ( steps_h , steps_w )
|
Calculate bounding boxes based on the image shape and the bounding-box size given by patch_size.
|
7,709
|
def make_classifier ( output_nonlin = nn . Softmax ( dim = - 1 ) , ** kwargs ) : return partial ( MLPModule , output_nonlin = output_nonlin , ** kwargs )
|
Return a multi-layer perceptron to be used with NeuralNetClassifier.
|
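A brief usage sketch for make_classifier, assuming it wraps skorch's toy MLPModule as shown above; the hidden_units and num_hidden keyword names are taken from that module and should be treated as assumptions:

```python
# Hypothetical sketch: build a module factory and let NeuralNetClassifier instantiate it.
from skorch import NeuralNetClassifier

module_factory = make_classifier(hidden_units=20, num_hidden=2)  # returns a partial(MLPModule, ...)
net = NeuralNetClassifier(module_factory, max_epochs=10, lr=0.1)
```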
7,710
|
def notify ( self , method_name , ** cb_kwargs ) : getattr ( self , method_name ) ( self , ** cb_kwargs ) for _ , cb in self . callbacks_ : getattr ( cb , method_name ) ( self , ** cb_kwargs )
|
Call the callback method specified in method_name with parameters specified in cb_kwargs .
|
7,711
|
def _yield_callbacks ( self ) : print_logs = [ ] for item in self . get_default_callbacks ( ) + ( self . callbacks or [ ] ) : if isinstance ( item , ( tuple , list ) ) : named_by_user = True name , cb = item else : named_by_user = False cb = item if isinstance ( cb , type ) : name = cb . __name__ else : name = cb . __class__ . __name__ if isinstance ( cb , PrintLog ) or ( cb == PrintLog ) : print_logs . append ( ( name , cb , named_by_user ) ) else : yield name , cb , named_by_user yield from print_logs
|
Yield all callbacks set on this instance, including a flag indicating whether each callback's name was set by the user.
|
7,712
|
def _callbacks_grouped_by_name ( self ) : callbacks , names_set_by_user = OrderedDict ( ) , set ( ) for name , cb , named_by_user in self . _yield_callbacks ( ) : if named_by_user : names_set_by_user . add ( name ) callbacks [ name ] = callbacks . get ( name , [ ] ) + [ cb ] return callbacks , names_set_by_user
|
Group callbacks by name and collect names set by the user .
|
7,713
|
def _uniquely_named_callbacks ( self ) : grouped_cbs , names_set_by_user = self . _callbacks_grouped_by_name ( ) for name , cbs in grouped_cbs . items ( ) : if len ( cbs ) > 1 and name in names_set_by_user : raise ValueError ( "Found duplicate user-set callback name " "'{}'. Use unique names to correct this." . format ( name ) ) for i , cb in enumerate ( cbs ) : if len ( cbs ) > 1 : unique_name = '{}_{}' . format ( name , i + 1 ) if unique_name in grouped_cbs : raise ValueError ( "Assigning new callback name failed " "since new name '{}' exists already." . format ( unique_name ) ) else : unique_name = name yield unique_name , cb
|
Make sure that the returned dict of named callbacks is unique w.r.t. the callback name. User-defined names will not be renamed on conflict; instead, an exception will be raised. The same goes for the case where renaming leads to a conflict.
|
7,714
|
def initialize_callbacks ( self ) : callbacks_ = [ ] class Dummy : pass for name , cb in self . _uniquely_named_callbacks ( ) : param_callback = getattr ( self , 'callbacks__' + name , Dummy ) if param_callback is not Dummy : cb = param_callback params = self . _get_params_for ( 'callbacks__{}' . format ( name ) ) if ( cb is None ) and params : raise ValueError ( "Trying to set a parameter for callback {} " "which does not exist." . format ( name ) ) if cb is None : continue if isinstance ( cb , type ) : cb = cb ( ** params ) else : cb . set_params ( ** params ) cb . initialize ( ) callbacks_ . append ( ( name , cb ) ) self . callbacks_ = callbacks_ return self
|
Initializes all callbacks and saves the result in the callbacks_ attribute.
|
7,715
|
def initialize_criterion ( self ) : criterion_params = self . _get_params_for ( 'criterion' ) self . criterion_ = self . criterion ( ** criterion_params ) if isinstance ( self . criterion_ , torch . nn . Module ) : self . criterion_ = self . criterion_ . to ( self . device ) return self
|
Initializes the criterion .
|
7,716
|
def _format_reinit_msg ( self , name , kwargs = None , triggered_directly = True ) : msg = "Re-initializing {}" . format ( name ) if triggered_directly and kwargs : msg += ( " because the following parameters were re-set: {}." . format ( ', ' . join ( sorted ( kwargs ) ) ) ) else : msg += "." return msg
|
Returns a message that informs about re-initializing a component.
|
7,717
|
def initialize_module ( self ) : kwargs = self . _get_params_for ( 'module' ) module = self . module is_initialized = isinstance ( module , torch . nn . Module ) if kwargs or not is_initialized : if is_initialized : module = type ( module ) if ( is_initialized or self . initialized_ ) and self . verbose : msg = self . _format_reinit_msg ( "module" , kwargs ) print ( msg ) module = module ( ** kwargs ) self . module_ = module . to ( self . device ) return self
|
Initializes the module .
|
7,718
|
def validation_step ( self , Xi , yi , ** fit_params ) : self . module_ . eval ( ) with torch . no_grad ( ) : y_pred = self . infer ( Xi , ** fit_params ) loss = self . get_loss ( y_pred , yi , X = Xi , training = False ) return { 'loss' : loss , 'y_pred' : y_pred , }
|
Perform a forward step using batched data and return the resulting loss .
|
7,719
|
def train_step_single ( self , Xi , yi , ** fit_params ) : self . module_ . train ( ) self . optimizer_ . zero_grad ( ) y_pred = self . infer ( Xi , ** fit_params ) loss = self . get_loss ( y_pred , yi , X = Xi , training = True ) loss . backward ( ) self . notify ( 'on_grad_computed' , named_parameters = TeeGenerator ( self . module_ . named_parameters ( ) ) , X = Xi , y = yi ) return { 'loss' : loss , 'y_pred' : y_pred , }
|
Compute y_pred and the loss value, and update the net's gradients.
|
7,720
|
def train_step ( self , Xi , yi , ** fit_params ) : step_accumulator = self . get_train_step_accumulator ( ) def step_fn ( ) : step = self . train_step_single ( Xi , yi , ** fit_params ) step_accumulator . store_step ( step ) return step [ 'loss' ] self . optimizer_ . step ( step_fn ) return step_accumulator . get_step ( )
|
Prepares a loss function callable and passes it to the optimizer, hence performing one optimization step.
|
7,721
|
def evaluation_step ( self , Xi , training = False ) : with torch . set_grad_enabled ( training ) : self . module_ . train ( training ) return self . infer ( Xi )
|
Perform a forward step to produce the output used for prediction and scoring .
|
7,722
|
def fit_loop ( self , X , y = None , epochs = None , ** fit_params ) : self . check_data ( X , y ) epochs = epochs if epochs is not None else self . max_epochs dataset_train , dataset_valid = self . get_split_datasets ( X , y , ** fit_params ) on_epoch_kwargs = { 'dataset_train' : dataset_train , 'dataset_valid' : dataset_valid , } y_train_is_ph = uses_placeholder_y ( dataset_train ) y_valid_is_ph = uses_placeholder_y ( dataset_valid ) for _ in range ( epochs ) : self . notify ( 'on_epoch_begin' , ** on_epoch_kwargs ) for data in self . get_iterator ( dataset_train , training = True ) : Xi , yi = unpack_data ( data ) yi_res = yi if not y_train_is_ph else None self . notify ( 'on_batch_begin' , X = Xi , y = yi_res , training = True ) step = self . train_step ( Xi , yi , ** fit_params ) self . history . record_batch ( 'train_loss' , step [ 'loss' ] . item ( ) ) self . history . record_batch ( 'train_batch_size' , get_len ( Xi ) ) self . notify ( 'on_batch_end' , X = Xi , y = yi_res , training = True , ** step ) if dataset_valid is None : self . notify ( 'on_epoch_end' , ** on_epoch_kwargs ) continue for data in self . get_iterator ( dataset_valid , training = False ) : Xi , yi = unpack_data ( data ) yi_res = yi if not y_valid_is_ph else None self . notify ( 'on_batch_begin' , X = Xi , y = yi_res , training = False ) step = self . validation_step ( Xi , yi , ** fit_params ) self . history . record_batch ( 'valid_loss' , step [ 'loss' ] . item ( ) ) self . history . record_batch ( 'valid_batch_size' , get_len ( Xi ) ) self . notify ( 'on_batch_end' , X = Xi , y = yi_res , training = False , ** step ) self . notify ( 'on_epoch_end' , ** on_epoch_kwargs ) return self
|
The proper fit loop .
|
7,723
|
def partial_fit ( self , X , y = None , classes = None , ** fit_params ) : if not self . initialized_ : self . initialize ( ) self . notify ( 'on_train_begin' , X = X , y = y ) try : self . fit_loop ( X , y , ** fit_params ) except KeyboardInterrupt : pass self . notify ( 'on_train_end' , X = X , y = y ) return self
|
Fit the module .
|
7,724
|
def fit ( self , X , y = None , ** fit_params ) : if not self . warm_start or not self . initialized_ : self . initialize ( ) self . partial_fit ( X , y , ** fit_params ) return self
|
Initialize and fit the module .
|
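The interaction between fit, partial_fit, and warm_start can be illustrated with a short, hedged sketch; MyModule, X_train, and y_train are placeholder names:

```python
# Hypothetical sketch: fit() re-initializes unless warm_start=True; partial_fit() never re-initializes.
net = NeuralNetClassifier(MyModule, warm_start=False)
net.fit(X_train, y_train)          # initialize() followed by the fit loop
net.partial_fit(X_train, y_train)  # continues training with the existing weights
```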
7,725
|
def forward_iter ( self , X , training = False , device = 'cpu' ) : dataset = self . get_dataset ( X ) iterator = self . get_iterator ( dataset , training = training ) for data in iterator : Xi = unpack_data ( data ) [ 0 ] yp = self . evaluation_step ( Xi , training = training ) if isinstance ( yp , tuple ) : yield tuple ( n . to ( device ) for n in yp ) else : yield yp . to ( device )
|
Yield outputs of module forward calls on each batch of data . The storage device of the yielded tensors is determined by the device parameter .
|
7,726
|
def forward ( self , X , training = False , device = 'cpu' ) : y_infer = list ( self . forward_iter ( X , training = training , device = device ) ) is_multioutput = len ( y_infer ) > 0 and isinstance ( y_infer [ 0 ] , tuple ) if is_multioutput : return tuple ( map ( torch . cat , zip ( * y_infer ) ) ) return torch . cat ( y_infer )
|
Gather and concatenate the outputs from forward calls on the input data.
|
7,727
|
def infer ( self , x , ** fit_params ) : x = to_tensor ( x , device = self . device ) if isinstance ( x , dict ) : x_dict = self . _merge_x_and_fit_params ( x , fit_params ) return self . module_ ( ** x_dict ) return self . module_ ( x , ** fit_params )
|
Perform a single inference step on a batch of data .
|
7,728
|
def predict_proba ( self , X ) : y_probas = [ ] for yp in self . forward_iter ( X , training = False ) : yp = yp [ 0 ] if isinstance ( yp , tuple ) else yp y_probas . append ( to_numpy ( yp ) ) y_proba = np . concatenate ( y_probas , 0 ) return y_proba
|
Return the output of the module's forward method as a numpy array.
|
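A short usage sketch; net and X_test are placeholders for a fitted classifier net and its input:

```python
# Hypothetical sketch: predict_proba returns a numpy array built from the forward outputs.
proba = net.predict_proba(X_test)   # e.g. shape (n_samples, n_classes) for a softmax output
y_pred = proba.argmax(axis=1)       # hard class predictions
```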
7,729
|
def get_loss ( self , y_pred , y_true , X = None , training = False ) : y_true = to_tensor ( y_true , device = self . device ) return self . criterion_ ( y_pred , y_true )
|
Return the loss for this batch .
|
7,730
|
def get_dataset ( self , X , y = None ) : if is_dataset ( X ) : return X dataset = self . dataset is_initialized = not callable ( dataset ) kwargs = self . _get_params_for ( 'dataset' ) if kwargs and is_initialized : raise TypeError ( "Trying to pass an initialized Dataset while " "passing Dataset arguments ({}) is not " "allowed." . format ( kwargs ) ) if is_initialized : return dataset return dataset ( X , y , ** kwargs )
|
Get a dataset that contains the input data and is passed to the iterator .
|
7,731
|
def get_split_datasets ( self , X , y = None , ** fit_params ) : dataset = self . get_dataset ( X , y ) if self . train_split : dataset_train , dataset_valid = self . train_split ( dataset , y , ** fit_params ) else : dataset_train , dataset_valid = dataset , None return dataset_train , dataset_valid
|
Get internal train and validation datasets .
|
7,732
|
def get_iterator ( self , dataset , training = False ) : if training : kwargs = self . _get_params_for ( 'iterator_train' ) iterator = self . iterator_train else : kwargs = self . _get_params_for ( 'iterator_valid' ) iterator = self . iterator_valid if 'batch_size' not in kwargs : kwargs [ 'batch_size' ] = self . batch_size if kwargs [ 'batch_size' ] == - 1 : kwargs [ 'batch_size' ] = len ( dataset ) return iterator ( dataset , ** kwargs )
|
Get an iterator that allows looping over the batches of the given data.
|
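A hedged sketch of how the batch_size handling above is typically used; MyModule is a placeholder module class:

```python
# Hypothetical sketch: batch_size=-1 is resolved to len(dataset), i.e. a single full-dataset batch.
net = NeuralNetClassifier(
    MyModule,
    batch_size=-1,                 # replaced by len(dataset) in get_iterator
    iterator_train__shuffle=True,  # extra DataLoader kwargs are routed via iterator_train__*
)
```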
7,733
|
def set_params ( self , ** kwargs ) : self . _check_deprecated_params ( ** kwargs ) normal_params , cb_params , special_params = { } , { } , { } virtual_params = { } for key , val in kwargs . items ( ) : if self . _is_virtual_param ( key ) : virtual_params [ key ] = val elif key . startswith ( 'callbacks' ) : cb_params [ key ] = val elif any ( key . startswith ( prefix ) for prefix in self . prefixes_ ) : special_params [ key ] = val else : normal_params [ key ] = val self . _apply_virtual_params ( virtual_params ) BaseEstimator . set_params ( self , ** normal_params ) for key , val in special_params . items ( ) : if key . endswith ( '_' ) : raise ValueError ( "Something went wrong here. Please open an issue on " "https://github.com/dnouri/skorch/issues detailing what " "caused this error." ) else : setattr ( self , key , val ) if cb_params : self . initialize_callbacks ( ) self . _set_params_callback ( ** cb_params ) if any ( key . startswith ( 'criterion' ) for key in special_params ) : self . initialize_criterion ( ) module_triggers_optimizer_reinit = False if any ( key . startswith ( 'module' ) for key in special_params ) : self . initialize_module ( ) module_triggers_optimizer_reinit = True optimizer_changed = ( any ( key . startswith ( 'optimizer' ) for key in special_params ) or 'lr' in normal_params ) if module_triggers_optimizer_reinit or optimizer_changed : if not hasattr ( self , 'module_' ) : self . initialize_module ( ) self . initialize_optimizer ( triggered_directly = optimizer_changed ) vars ( self ) . update ( kwargs ) return self
|
Set the parameters of this class .
|
7,734
|
def _set_params_callback ( self , ** params ) : if 'callbacks' in params : setattr ( self , 'callbacks' , params . pop ( 'callbacks' ) ) names , _ = zip ( * getattr ( self , 'callbacks_' ) ) for key in params . copy ( ) : name = key [ 11 : ] if '__' not in name and name in names : self . _replace_callback ( name , params . pop ( key ) ) for key in params . copy ( ) : name = key [ 11 : ] part0 , part1 = name . split ( '__' ) kwarg = { part1 : params . pop ( key ) } callback = dict ( self . callbacks_ ) . get ( part0 ) if callback is not None : callback . set_params ( ** kwarg ) else : raise ValueError ( "Trying to set a parameter for callback {} " "which does not exist." . format ( part0 ) ) return self
|
Special handling for setting params on callbacks .
|
7,735
|
def save_params ( self , f = None , f_params = None , f_optimizer = None , f_history = None ) : if f is not None : warnings . warn ( "f argument was renamed to f_params and will be removed " "in the next release. To make your code future-proof it is " "recommended to explicitly specify keyword arguments' names " "instead of relying on positional order." , DeprecationWarning ) f_params = f if f_params is not None : if not hasattr ( self , 'module_' ) : raise NotInitializedError ( "Cannot save parameters of an un-initialized model. " "Please initialize first by calling .initialize() " "or by fitting the model with .fit(...)." ) torch . save ( self . module_ . state_dict ( ) , f_params ) if f_optimizer is not None : if not hasattr ( self , 'optimizer_' ) : raise NotInitializedError ( "Cannot save state of an un-initialized optimizer. " "Please initialize first by calling .initialize() " "or by fitting the model with .fit(...)." ) torch . save ( self . optimizer_ . state_dict ( ) , f_optimizer ) if f_history is not None : self . history . to_file ( f_history )
|
Saves the module's parameters, history, and optimizer, but not the whole object.
|
7,736
|
def _check_device ( self , requested_device , map_device ) : type_1 = torch . device ( requested_device ) type_2 = torch . device ( map_device ) if type_1 != type_2 : warnings . warn ( 'Setting self.device = {} since the requested device ({}) ' 'is not available.' . format ( map_device , requested_device ) , DeviceWarning ) return map_device return requested_device
|
Compare the requested device with the map device; if they differ, return the map device along with a warning, otherwise return the requested device.
|
7,737
|
def load_params ( self , f = None , f_params = None , f_optimizer = None , f_history = None , checkpoint = None ) : def _get_state_dict ( f ) : map_location = get_map_location ( self . device ) self . device = self . _check_device ( self . device , map_location ) return torch . load ( f , map_location = map_location ) if f is not None : warnings . warn ( "f is deprecated in save_params and will be removed in the " "next release, please use f_params instead" , DeprecationWarning ) f_params = f if f_history is not None : self . history = History . from_file ( f_history ) if checkpoint is not None : if f_history is None and checkpoint . f_history is not None : self . history = History . from_file ( checkpoint . f_history_ ) formatted_files = checkpoint . get_formatted_files ( self ) f_params = f_params or formatted_files [ 'f_params' ] f_optimizer = f_optimizer or formatted_files [ 'f_optimizer' ] if f_params is not None : if not hasattr ( self , 'module_' ) : raise NotInitializedError ( "Cannot load parameters of an un-initialized model. " "Please initialize first by calling .initialize() " "or by fitting the model with .fit(...)." ) state_dict = _get_state_dict ( f_params ) self . module_ . load_state_dict ( state_dict ) if f_optimizer is not None : if not hasattr ( self , 'optimizer_' ) : raise NotInitializedError ( "Cannot load state of an un-initialized optimizer. " "Please initialize first by calling .initialize() " "or by fitting the model with .fit(...)." ) state_dict = _get_state_dict ( f_optimizer ) self . optimizer_ . load_state_dict ( state_dict )
|
Loads the module's parameters, history, and optimizer, but not the whole object.
|
7,738
|
def save_history ( self , f ) : warnings . warn ( "save_history is deprecated and will be removed in the next " "release, please use save_params with the f_history keyword" , DeprecationWarning ) self . history . to_file ( f )
|
Saves the history of NeuralNet as a JSON file. In order to use this feature, the history must only contain JSON-encodable Python data structures. Numpy and PyTorch types should not be in the history.
|
7,739
|
def load_history ( self , f ) : warnings . warn ( "load_history is deprecated and will be removed in the next " "release, please use load_params with the f_history keyword" , DeprecationWarning ) self . history = History . from_file ( f )
|
Load the history of a NeuralNet from a JSON file. See save_history for examples.
|
7,740
|
def _not_none ( items ) : if not isinstance ( items , ( tuple , list ) ) : items = ( items , ) return all ( item is not _none for item in items )
|
Whether the item is a placeholder or contains a placeholder .
|
7,741
|
def _filter_none ( items ) : type_ = list if isinstance ( items , list ) else tuple return type_ ( filter ( _not_none , items ) )
|
Filter out the special placeholder value, preserving the sequence type.
|
7,742
|
def _getitem ( item , i ) : if not isinstance ( i , ( tuple , list ) ) : return item . get ( i , _none ) type_ = list if isinstance ( item , list ) else tuple return type_ ( item . get ( j , _none ) for j in i )
|
Extract value or values from dicts .
|
7,743
|
def _unpack_index ( i ) : if len ( i ) > 4 : raise KeyError ( "Tried to index history with {} indices but only " "4 indices are possible." . format ( len ( i ) ) ) i_e , k_e , i_b , k_b = i + tuple ( [ None ] * ( 4 - len ( i ) ) ) if i_b is not None and not isinstance ( i_b , ( int , slice ) ) : if k_b is not None : raise KeyError ( "The last argument '{}' is invalid; it must be a " "string or tuple of strings." . format ( k_b ) ) warnings . warn ( "Argument 3 to history slicing must be of type int or slice, e.g. " "history[:, 'batches', 'train_loss'] should be " "history[:, 'batches', :, 'train_loss']." , DeprecationWarning , ) i_b , k_b = slice ( None ) , i_b return i_e , k_e , i_b , k_b
|
Unpack index and return exactly four elements .
|
7,744
|
def record ( self , attr , value ) : msg = "Call new_epoch before recording for the first time." if not self : raise ValueError ( msg ) self [ - 1 ] [ attr ] = value
|
Add a new value to the given column for the current epoch .
|
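A minimal sketch of how record interacts with new_epoch, assuming skorch's History class:

```python
# Hypothetical sketch: record() writes into the most recent epoch row.
from skorch.history import History

history = History()
history.new_epoch()                  # without this, record() raises ValueError
history.record('train_loss', 0.42)
history.record('epoch', 1)
print(history[-1])                   # {'batches': [], 'train_loss': 0.42, 'epoch': 1}
```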
7,745
|
def from_file ( cls , f ) : with open_file_like ( f , 'r' ) as fp : return cls ( json . load ( fp ) )
|
Load the history of a NeuralNet from a JSON file.
|
7,746
|
def to_file ( self , f ) : with open_file_like ( f , 'w' ) as fp : json . dump ( self . to_list ( ) , fp )
|
Saves the history as a JSON file. In order to use this feature, the history must only contain JSON-encodable Python data structures. Numpy and PyTorch types should not be in the history.
|
7,747
|
def get_formatted_files ( self , net ) : idx = - 1 if ( self . event_name is not None and net . history ) : for i , v in enumerate ( net . history [ : , self . event_name ] ) : if v : idx = i return { "f_params" : self . _format_target ( net , self . f_params , idx ) , "f_optimizer" : self . _format_target ( net , self . f_optimizer , idx ) , "f_history" : self . f_history_ , "f_pickle" : self . _format_target ( net , self . f_pickle , idx ) }
|
Returns a dictionary of formatted filenames
|
7,748
|
def _format_target ( self , net , f , idx ) : if f is None : return None if isinstance ( f , str ) : f = self . fn_prefix + f . format ( net = net , last_epoch = net . history [ idx ] , last_batch = net . history [ idx , 'batches' , - 1 ] , ) return os . path . join ( self . dirname , f ) return f
|
Apply formatting to the target filename template .
|
7,749
|
def _validate_filenames ( self ) : if not self . dirname : return def _is_truthy_and_not_str ( f ) : return f and not isinstance ( f , str ) if ( _is_truthy_and_not_str ( self . f_optimizer ) or _is_truthy_and_not_str ( self . f_params ) or _is_truthy_and_not_str ( self . f_history ) or _is_truthy_and_not_str ( self . f_pickle ) ) : raise SkorchException ( 'dirname can only be used when f_* are strings' )
|
Checks if passed filenames are valid .
|
7,750
|
def _calc_new_threshold ( self , score ) : if self . threshold_mode == 'rel' : abs_threshold_change = self . threshold * score else : abs_threshold_change = self . threshold if self . lower_is_better : new_threshold = score - abs_threshold_change else : new_threshold = score + abs_threshold_change return new_threshold
|
Determine threshold based on score .
|
7,751
|
def repackage_hidden ( self , h ) : if isinstance ( h , Variable ) : return torch . tensor ( h . data , device = h . device ) else : return tuple ( self . repackage_hidden ( v ) for v in h )
|
Wraps hidden states in new Variables to detach them from their history .
|
7,752
|
def _set_optimizer_param ( optimizer , param_group , param_name , value ) : if param_group == 'all' : groups = optimizer . param_groups else : groups = [ optimizer . param_groups [ int ( param_group ) ] ] for group in groups : group [ param_name ] = value
|
Set a parameter on all or on a specific parameter group of an optimizer instance. To select all param groups, use param_group='all'.
|
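A small sketch of the helper above with a plain PyTorch optimizer; the two parameter groups are created just for illustration:

```python
# Hypothetical sketch: target all groups or a single group by index.
import torch

p0, p1 = torch.nn.Parameter(torch.zeros(1)), torch.nn.Parameter(torch.zeros(1))
opt = torch.optim.SGD([{'params': [p0]}, {'params': [p1]}], lr=0.1)

_set_optimizer_param(opt, param_group='all', param_name='lr', value=0.01)     # both groups
_set_optimizer_param(opt, param_group='1', param_name='momentum', value=0.9)  # second group only
```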
7,753
|
def optimizer_setter ( net , param , value , optimizer_attr = 'optimizer_' , optimizer_name = 'optimizer' ) : if param == 'lr' : param_group = 'all' param_name = 'lr' net . lr = value else : param_group , param_name = _extract_optimizer_param_name_and_group ( optimizer_name , param ) _set_optimizer_param ( optimizer = getattr ( net , optimizer_attr ) , param_group = param_group , param_name = param_name , value = value )
|
Handle setting of optimizer parameters, such as the learning rate, and parameter-group-specific parameters, such as momentum.
|
7,754
|
def _check_lr ( name , optimizer , lr ) : n = len ( optimizer . param_groups ) if not isinstance ( lr , ( list , tuple ) ) : return lr * np . ones ( n ) if len ( lr ) != n : raise ValueError ( "{} lr values were passed for {} but there are " "{} param groups." . format ( n , name , len ( lr ) ) ) return np . array ( lr )
|
Return one learning rate for each param group .
|
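A brief sketch of _check_lr, reusing the two-group optimizer pattern from above; names are illustrative:

```python
# Hypothetical sketch: a scalar lr is broadcast, a list must match the number of param groups.
import numpy as np  # _check_lr returns numpy arrays

_check_lr("optimizer", opt, 0.01)           # -> array([0.01, 0.01])
_check_lr("optimizer", opt, [0.01, 0.001])  # -> array([0.01, 0.001])
```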
7,755
|
def simulate ( self , steps , initial_lr ) : test = torch . ones ( 1 , requires_grad = True ) opt = torch . optim . SGD ( [ { 'params' : test , 'lr' : initial_lr } ] ) policy_cls = self . _get_policy_cls ( ) sch = policy_cls ( opt , ** self . kwargs ) if hasattr ( sch , 'batch_step' ) and callable ( sch . batch_step ) : step = sch . batch_step else : step = sch . step lrs = [ ] for _ in range ( steps ) : step ( ) lrs . append ( sch . get_lr ( ) [ 0 ] ) return np . array ( lrs )
|
Simulates the learning rate scheduler .
|
7,756
|
def _get_scheduler ( self , net , policy , ** scheduler_kwargs ) : if policy not in [ CyclicLR , ReduceLROnPlateau ] and 'last_epoch' not in scheduler_kwargs : last_epoch = len ( net . history ) - 1 scheduler_kwargs [ 'last_epoch' ] = last_epoch if policy is CyclicLR and 'last_batch_idx' not in scheduler_kwargs : scheduler_kwargs [ 'last_batch_idx' ] = self . batch_idx_ - 1 return policy ( net . optimizer_ , ** scheduler_kwargs )
|
Return scheduler based on indicated policy with appropriate parameters .
|
7,757
|
def is_verified ( self ) : if self . _verified is None : signature = self . _data . get ( 'Signature' ) if not signature : self . _verified = False return self . _verified signature = bytes ( base64 . b64decode ( signature ) ) sign_bytes = self . _get_bytes_to_sign ( ) if not sign_bytes : self . _verified = False return self . _verified if not self . certificate : self . _verified = False return self . _verified pkey = self . certificate . get_pubkey ( ) pkey . verify_init ( ) pkey . verify_update ( sign_bytes ) verify_result = pkey . verify_final ( signature ) self . _verified = verify_result == 1 return self . _verified
|
Verifies an SES bounce message .
|
7,758
|
def certificate ( self ) : if not hasattr ( self , '_certificate' ) : cert_url = self . _get_cert_url ( ) if not cert_url : self . _certificate = None return self . _certificate try : import requests except ImportError : raise ImproperlyConfigured ( "requests is required for bounce message verification." ) try : import M2Crypto except ImportError : raise ImproperlyConfigured ( "M2Crypto is required for bounce message verification." ) response = requests . get ( cert_url ) if response . status_code != 200 : logger . warning ( u'Could not download certificate from %s: "%s"' , cert_url , response . status_code ) self . _certificate = None return self . _certificate try : self . _certificate = M2Crypto . X509 . load_cert_string ( response . content ) except M2Crypto . X509 . X509Error as e : logger . warning ( u'Could not load certificate from %s: "%s"' , cert_url , e ) self . _certificate = None return self . _certificate
|
Retrieves the certificate used to sign the bounce message .
|
7,759
|
def _get_bytes_to_sign ( self ) : msg_type = self . _data . get ( 'Type' ) if msg_type == 'Notification' : fields_to_sign = [ 'Message' , 'MessageId' , 'Subject' , 'Timestamp' , 'TopicArn' , 'Type' , ] elif ( msg_type == 'SubscriptionConfirmation' or msg_type == 'UnsubscribeConfirmation' ) : fields_to_sign = [ 'Message' , 'MessageId' , 'SubscribeURL' , 'Timestamp' , 'Token' , 'TopicArn' , 'Type' , ] else : logger . warning ( u'Unrecognized SNS message Type: "%s"' , msg_type ) return None outbytes = StringIO ( ) for field_name in fields_to_sign : field_value = smart_str ( self . _data . get ( field_name , '' ) , errors = "replace" ) if field_value : outbytes . write ( text ( field_name ) ) outbytes . write ( text ( "\n" ) ) outbytes . write ( text ( field_value ) ) outbytes . write ( text ( "\n" ) ) response = outbytes . getvalue ( ) return bytes ( response , 'utf-8' )
|
Creates the message used for signing SNS notifications . This is used to verify the bounce message when it is received .
|
7,760
|
def superuser_only ( view_func ) : def _inner ( request , * args , ** kwargs ) : if not request . user . is_superuser : raise PermissionDenied return view_func ( request , * args , ** kwargs ) return _inner
|
Limit a view to superuser only .
|
7,761
|
def sum_stats ( stats_data ) : t_bounces = 0 t_complaints = 0 t_delivery_attempts = 0 t_rejects = 0 for dp in stats_data : t_bounces += int ( dp [ 'Bounces' ] ) t_complaints += int ( dp [ 'Complaints' ] ) t_delivery_attempts += int ( dp [ 'DeliveryAttempts' ] ) t_rejects += int ( dp [ 'Rejects' ] ) return { 'Bounces' : t_bounces , 'Complaints' : t_complaints , 'DeliveryAttempts' : t_delivery_attempts , 'Rejects' : t_rejects , }
|
Summarize the bounces, complaints, delivery attempts, and rejects from a list of datapoints.
|
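A self-contained sketch of sum_stats with made-up datapoints in the shape returned by SES send statistics:

```python
# Hypothetical sketch: values arrive as strings and are summed as integers.
totals = sum_stats([
    {'Bounces': '1', 'Complaints': '0', 'DeliveryAttempts': '10', 'Rejects': '0'},
    {'Bounces': '0', 'Complaints': '2', 'DeliveryAttempts': '15', 'Rejects': '1'},
])
# totals == {'Bounces': 1, 'Complaints': 2, 'DeliveryAttempts': 25, 'Rejects': 1}
```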
7,762
|
def dashboard ( request ) : cache_key = 'vhash:django_ses_stats' cached_view = cache . get ( cache_key ) if cached_view : return cached_view region = RegionInfo ( name = settings . AWS_SES_REGION_NAME , endpoint = settings . AWS_SES_REGION_ENDPOINT ) ses_conn = SESConnection ( aws_access_key_id = settings . ACCESS_KEY , aws_secret_access_key = settings . SECRET_KEY , region = region , proxy = settings . AWS_SES_PROXY , proxy_port = settings . AWS_SES_PROXY_PORT , ) quota_dict = ses_conn . get_send_quota ( ) verified_emails_dict = ses_conn . list_verified_email_addresses ( ) stats = ses_conn . get_send_statistics ( ) quota = quota_parse ( quota_dict ) verified_emails = emails_parse ( verified_emails_dict ) ordered_data = stats_to_list ( stats ) summary = sum_stats ( ordered_data ) extra_context = { 'title' : 'SES Statistics' , 'datapoints' : ordered_data , '24hour_quota' : quota [ 'Max24HourSend' ] , '24hour_sent' : quota [ 'SentLast24Hours' ] , '24hour_remaining' : float ( quota [ 'Max24HourSend' ] ) - float ( quota [ 'SentLast24Hours' ] ) , 'persecond_rate' : quota [ 'MaxSendRate' ] , 'verified_emails' : verified_emails , 'summary' : summary , 'access_key' : ses_conn . gs_access_key_id , 'local_time' : True , } response = render ( request , 'django_ses/send_stats.html' , extra_context ) cache . set ( cache_key , response , 60 * 15 ) return response
|
Graph SES send statistics over time .
|
7,763
|
def dkim_sign ( message , dkim_domain = None , dkim_key = None , dkim_selector = None , dkim_headers = None ) : try : import dkim except ImportError : pass else : if dkim_domain and dkim_key : sig = dkim . sign ( message , dkim_selector , dkim_domain , dkim_key , include_headers = dkim_headers ) message = sig + message return message
|
Return signed email message if dkim package and settings are available .
|
7,764
|
def open ( self ) : if self . connection : return False try : self . connection = SESConnection ( aws_access_key_id = self . _access_key_id , aws_secret_access_key = self . _access_key , region = self . _region , proxy = self . _proxy , proxy_port = self . _proxy_port , proxy_user = self . _proxy_user , proxy_pass = self . _proxy_pass , ) except Exception : if not self . fail_silently : raise
|
Create a connection to the AWS API server . This can be reused for sending multiple emails .
|
7,765
|
def close ( self ) : try : self . connection . close ( ) self . connection = None except Exception : if not self . fail_silently : raise
|
Close any open HTTP connections to the API server .
|
7,766
|
def set_to_tuple ( tokens ) : internal_assert ( len ( tokens ) == 1 , "invalid set maker tokens" , tokens ) if "comp" in tokens or "list" in tokens : return "(" + tokens [ 0 ] + ")" elif "test" in tokens : return "(" + tokens [ 0 ] + ",)" else : raise CoconutInternalException ( "invalid set maker item" , tokens [ 0 ] )
|
Converts set literal tokens to tuples .
|
7,767
|
def single_import ( path , imp_as ) : out = [ ] parts = path . split ( "./" ) if len ( parts ) == 1 : imp_from , imp = None , parts [ 0 ] else : imp_from , imp = parts if imp == imp_as : imp_as = None elif imp . endswith ( "." + imp_as ) : if imp_from is None : imp_from = "" imp_from += imp . rsplit ( "." + imp_as , 1 ) [ 0 ] imp , imp_as = imp_as , None if imp_from is None and imp == "sys" : out . append ( ( imp_as if imp_as is not None else imp ) + " = _coconut_sys" ) elif imp_as is not None and "." in imp_as : fake_mods = imp_as . split ( "." ) out . append ( import_stmt ( imp_from , imp , import_as_var ) ) for i in range ( 1 , len ( fake_mods ) ) : mod_name = "." . join ( fake_mods [ : i ] ) out . extend ( ( "try:" , openindent + mod_name , closeindent + "except:" , openindent + mod_name + ' = _coconut.types.ModuleType("' + mod_name + '")' , closeindent + "else:" , openindent + "if not _coconut.isinstance(" + mod_name + ", _coconut.types.ModuleType):" , openindent + mod_name + ' = _coconut.types.ModuleType("' + mod_name + '")' + closeindent * 2 , ) ) out . append ( "." . join ( fake_mods ) + " = " + import_as_var ) else : out . append ( import_stmt ( imp_from , imp , imp_as ) ) return out
|
Generate import statements from a fully qualified import and the name to bind it to .
|
7,768
|
def split_args_list ( tokens , loc ) : req_args , def_args , star_arg , kwd_args , dubstar_arg = [ ] , [ ] , None , [ ] , None pos = 0 for arg in tokens : if len ( arg ) == 1 : if arg [ 0 ] == "*" : if pos >= 3 : raise CoconutDeferredSyntaxError ( "star separator at invalid position in function definition" , loc ) pos = 3 else : if pos > 0 : raise CoconutDeferredSyntaxError ( "positional arguments must come first in function definition" , loc ) req_args . append ( arg [ 0 ] ) elif len ( arg ) == 2 : if arg [ 0 ] == "*" : if pos >= 2 : raise CoconutDeferredSyntaxError ( "star argument at invalid position in function definition" , loc ) pos = 2 star_arg = arg [ 1 ] elif arg [ 0 ] == "**" : if pos == 4 : raise CoconutDeferredSyntaxError ( "double star argument at invalid position in function definition" , loc ) pos = 4 dubstar_arg = arg [ 1 ] else : if pos <= 1 : pos = 1 def_args . append ( ( arg [ 0 ] , arg [ 1 ] ) ) elif pos <= 3 : pos = 3 kwd_args . append ( ( arg [ 0 ] , arg [ 1 ] ) ) else : raise CoconutDeferredSyntaxError ( "invalid default argument in function definition" , loc ) else : raise CoconutInternalException ( "invalid function definition argument" , arg ) return req_args , def_args , star_arg , kwd_args , dubstar_arg
|
Splits function definition arguments .
|
7,769
|
def match_case_tokens ( loc , tokens , check_var , top ) : if len ( tokens ) == 2 : matches , stmts = tokens cond = None elif len ( tokens ) == 3 : matches , cond , stmts = tokens else : raise CoconutInternalException ( "invalid case match tokens" , tokens ) matching = Matcher ( loc , check_var ) matching . match ( matches , match_to_var ) if cond : matching . add_guard ( cond ) return matching . build ( stmts , set_check_var = top )
|
Build code for matching the given case .
|
7,770
|
def setup ( self , target = None , strict = False , minify = False , line_numbers = False , keep_lines = False , no_tco = False ) : if target is None : target = "" else : target = str ( target ) . replace ( "." , "" ) if target in pseudo_targets : target = pseudo_targets [ target ] if target not in targets : raise CoconutException ( "unsupported target Python version " + ascii ( target ) , extra = "supported targets are " + ', ' . join ( ascii ( t ) for t in specific_targets ) + ", or leave blank for universal" , ) logger . log_vars ( "Compiler args:" , locals ( ) ) self . target , self . strict , self . minify , self . line_numbers , self . keep_lines , self . no_tco = ( target , strict , minify , line_numbers , keep_lines , no_tco , )
|
Initializes parsing parameters .
|
7,771
|
def genhash ( self , package , code ) : return hex ( checksum ( hash_sep . join ( str ( item ) for item in ( VERSION_STR , ) + self . __reduce__ ( ) [ 1 ] + ( package , code ) ) . encode ( default_encoding ) , ) )
|
Generate a hash from code .
|
7,772
|
def reset ( self ) : self . indchar = None self . comments = { } self . refs = [ ] self . set_skips ( [ ] ) self . docstring = "" self . ichain_count = 0 self . tre_store_count = 0 self . case_check_count = 0 self . stmt_lambdas = [ ] if self . strict : self . unused_imports = set ( ) self . bind ( )
|
Resets references .
|
7,773
|
def set_skips ( self , skips ) : skips . sort ( ) internal_assert ( lambda : len ( set ( skips ) ) == len ( skips ) , "duplicate line skip(s) in skips" , skips ) self . skips = skips
|
Set the line skips .
|
7,774
|
def adjust ( self , ln ) : adj_ln = ln need_unskipped = 0 for i in self . skips : if i <= ln : need_unskipped += 1 elif adj_ln + need_unskipped < i : break else : need_unskipped -= i - adj_ln - 1 adj_ln = i return adj_ln + need_unskipped
|
Converts a parsing line number into an original line number .
|
7,775
|
def reformat ( self , snip , index = None ) : if index is not None : return self . reformat ( snip ) , len ( self . reformat ( snip [ : index ] ) ) else : return self . repl_proc ( snip , reformatting = True , log = False )
|
Post-process a preprocessed snippet.
|
7,776
|
def eval_now ( self , code ) : result = eval ( self . reformat ( code ) ) if result is None or isinstance ( result , ( bool , int , float , complex ) ) : return repr ( result ) elif isinstance ( result , bytes ) : return "b" + self . wrap_str_of ( result ) elif isinstance ( result , str ) : return self . wrap_str_of ( result ) else : return None
|
Reformat and evaluate a code snippet and return code for the result .
|
7,777
|
def make_err ( self , errtype , message , original , loc , ln = None , reformat = True , * args , ** kwargs ) : if ln is None : ln = self . adjust ( lineno ( loc , original ) ) errstr , index = getline ( loc , original ) , col ( loc , original ) - 1 if reformat : errstr , index = self . reformat ( errstr , index ) return errtype ( message , errstr , index , ln , * args , ** kwargs )
|
Generate an error of the specified type .
|
7,778
|
def strict_err_or_warn ( self , * args , ** kwargs ) : if self . strict : raise self . make_err ( CoconutStyleError , * args , ** kwargs ) else : logger . warn_err ( self . make_err ( CoconutSyntaxWarning , * args , ** kwargs ) )
|
Raises an error if in strict mode; otherwise raises a warning.
|
7,779
|
def add_ref ( self , reftype , data ) : ref = ( reftype , data ) try : index = self . refs . index ( ref ) except ValueError : self . refs . append ( ref ) index = len ( self . refs ) - 1 return str ( index )
|
Add a reference and return its identifier.
|
7,780
|
def get_ref ( self , reftype , index ) : try : got_reftype , data = self . refs [ int ( index ) ] except ( IndexError , ValueError ) : raise CoconutInternalException ( "no reference at invalid index" , index ) internal_assert ( got_reftype == reftype , "wanted " + reftype + " reference; got " + got_reftype + " reference" ) return data
|
Retrieve a reference .
|
7,781
|
def wrap_str ( self , text , strchar , multiline = False ) : if multiline : strchar *= 3 return strwrapper + self . add_ref ( "str" , ( text , strchar ) ) + unwrapper
|
Wrap a string .
|
7,782
|
def wrap_str_of ( self , text ) : text_repr = ascii ( text ) internal_assert ( text_repr [ 0 ] == text_repr [ - 1 ] and text_repr [ 0 ] in ( "'" , '"' ) , "cannot wrap str of" , text ) return self . wrap_str ( text_repr [ 1 : - 1 ] , text_repr [ - 1 ] )
|
Wrap a string of a string .
|
7,783
|
def wrap_passthrough ( self , text , multiline = True ) : if not multiline : text = text . lstrip ( ) if multiline : out = "\\" else : out = "\\\\" out += self . add_ref ( "passthrough" , text ) + unwrapper if not multiline : out += "\n" return out
|
Wrap a passthrough .
|
7,784
|
def wrap_comment ( self , text , reformat = True ) : if reformat : text = self . reformat ( text ) return "#" + self . add_ref ( "comment" , text ) + unwrapper
|
Wrap a comment .
|
7,785
|
def apply_procs ( self , procs , kwargs , inputstring , log = True ) : for get_proc in procs : proc = get_proc ( self ) inputstring = proc ( inputstring , ** kwargs ) if log : logger . log_tag ( proc . __name__ , inputstring , multiline = True ) return inputstring
|
Apply processors to inputstring .
|
7,786
|
def pre ( self , inputstring , ** kwargs ) : out = self . apply_procs ( self . preprocs , kwargs , str ( inputstring ) ) logger . log_tag ( "skips" , self . skips ) return out
|
Perform pre-processing.
|
7,787
|
def getheader ( self , which , use_hash = None , polish = True ) : header = getheader ( which , use_hash = use_hash , target = self . target , no_tco = self . no_tco , strict = self . strict , ) if polish : header = self . polish ( header ) return header
|
Get a formatted header .
|
7,788
|
def make_syntax_err ( self , err , original ) : msg , loc = err . args return self . make_err ( CoconutSyntaxError , msg , original , loc )
|
Make a CoconutSyntaxError from a CoconutDeferredSyntaxError .
|
7,789
|
def make_parse_err ( self , err , reformat = True , include_ln = True ) : err_line = err . line err_index = err . col - 1 err_lineno = err . lineno if include_ln else None if reformat : err_line , err_index = self . reformat ( err_line , err_index ) if err_lineno is not None : err_lineno = self . adjust ( err_lineno ) return CoconutParseError ( None , err_line , err_index , err_lineno )
|
Make a CoconutParseError from a ParseBaseException .
|
7,790
|
def parse ( self , inputstring , parser , preargs , postargs ) : self . reset ( ) pre_procd = None with logger . gather_parsing_stats ( ) : try : pre_procd = self . pre ( inputstring , ** preargs ) parsed = parse ( parser , pre_procd ) out = self . post ( parsed , ** postargs ) except ParseBaseException as err : raise self . make_parse_err ( err ) except CoconutDeferredSyntaxError as err : internal_assert ( pre_procd is not None , "invalid deferred syntax error in pre-processing" , err ) raise self . make_syntax_err ( err , pre_procd ) except RuntimeError as err : raise CoconutException ( str ( err ) , extra = "try again with --recursion-limit greater than the current " + str ( sys . getrecursionlimit ( ) ) , ) if self . strict : for name in self . unused_imports : if name != "*" : logger . warn ( "found unused import" , name , extra = "disable --strict to dismiss" ) return out
|
Use the parser to parse the inputstring with appropriate setup and teardown .
|
7,791
|
def prepare ( self , inputstring , strip = False , nl_at_eof_check = False , ** kwargs ) : if self . strict and nl_at_eof_check and inputstring and not inputstring . endswith ( "\n" ) : end_index = len ( inputstring ) - 1 if inputstring else 0 raise self . make_err ( CoconutStyleError , "missing new line at end of file" , inputstring , end_index ) original_lines = inputstring . splitlines ( ) if self . keep_lines : self . original_lines = original_lines inputstring = "\n" . join ( original_lines ) if strip : inputstring = inputstring . strip ( ) return inputstring
|
Prepare a string for processing .
|
7,792
|
def passthrough_proc ( self , inputstring , ** kwargs ) : out = [ ] found = None hold = None count = None multiline = None skips = self . copy_skips ( ) for i , c in enumerate ( append_it ( inputstring , "\n" ) ) : if hold is not None : count += paren_change ( c , opens = "(" , closes = ")" ) if count >= 0 and c == hold : out . append ( self . wrap_passthrough ( found , multiline ) ) found = None hold = None count = None multiline = None else : if c == "\n" : skips = addskip ( skips , self . adjust ( lineno ( i , inputstring ) ) ) found += c elif found : if c == "\\" : found = "" hold = "\n" count = 0 multiline = False elif c == "(" : found = "" hold = ")" count = - 1 multiline = True else : out . append ( "\\" + c ) found = None elif c == "\\" : found = True else : out . append ( c ) if hold is not None or found is not None : raise self . make_err ( CoconutSyntaxError , "unclosed passthrough" , inputstring , i ) self . set_skips ( skips ) return "" . join ( out )
|
Process Python passthroughs.
|
7,793
|
def leading_whitespace ( self , inputstring ) : count = 0 for i , c in enumerate ( inputstring ) : if c == " " : count += 1 elif c == "\t" : count += tabworth - ( i % tabworth ) else : break if self . indchar is None : self . indchar = c elif c != self . indchar : self . strict_err_or_warn ( "found mixing of tabs and spaces" , inputstring , i ) return count
|
Count leading whitespace .
|
7,794
|
def stmt_lambda_proc ( self , inputstring , ** kwargs ) : regexes = [ ] for i in range ( len ( self . stmt_lambdas ) ) : name = self . stmt_lambda_name ( i ) regex = compile_regex ( r"\b%s\b" % ( name , ) ) regexes . append ( regex ) out = [ ] for line in inputstring . splitlines ( ) : for i , regex in enumerate ( regexes ) : if regex . search ( line ) : indent , line = split_leading_indent ( line ) out . append ( indent + self . stmt_lambdas [ i ] ) out . append ( line ) return "\n" . join ( out )
|
Add statement lambda definitions .
|
7,795
|
def reind_proc ( self , inputstring , ** kwargs ) : out = [ ] level = 0 for line in inputstring . splitlines ( ) : line , comment = split_comment ( line . strip ( ) ) indent , line = split_leading_indent ( line ) level += ind_change ( indent ) if line : line = " " * self . tabideal * level + line line , indent = split_trailing_indent ( line ) level += ind_change ( indent ) line = ( line + comment ) . rstrip ( ) out . append ( line ) if level != 0 : complain ( CoconutInternalException ( "non-zero final indentation level" , level ) ) return "\n" . join ( out )
|
Add back indentation .
|
7,796
|
def ln_comment ( self , ln ) : if self . keep_lines : if not 1 <= ln <= len ( self . original_lines ) + 1 : raise CoconutInternalException ( "out of bounds line number" , ln , "not in range [1, " + str ( len ( self . original_lines ) + 1 ) + "]" , ) elif ln == len ( self . original_lines ) + 1 : lni = - 1 else : lni = ln - 1 if self . line_numbers and self . keep_lines : if self . minify : comment = str ( ln ) + " " + self . original_lines [ lni ] else : comment = " line " + str ( ln ) + ": " + self . original_lines [ lni ] elif self . keep_lines : if self . minify : comment = self . original_lines [ lni ] else : comment = " " + self . original_lines [ lni ] elif self . line_numbers : if self . minify : comment = str ( ln ) else : comment = " line " + str ( ln ) else : return "" return self . wrap_comment ( comment , reformat = False )
|
Get an end-of-line comment. CoconutInternalExceptions should always be caught and complained about.
|
7,797
|
def endline_repl ( self , inputstring , reformatting = False , ** kwargs ) : out = [ ] ln = 1 for line in inputstring . splitlines ( ) : add_one_to_ln = False try : if line . endswith ( lnwrapper ) : line , index = line [ : - 1 ] . rsplit ( "#" , 1 ) new_ln = self . get_ref ( "ln" , index ) if new_ln < ln : raise CoconutInternalException ( "line number decreased" , ( ln , new_ln ) ) ln = new_ln line = line . rstrip ( ) add_one_to_ln = True if not reformatting or add_one_to_ln : line += self . comments . get ( ln , "" ) if not reformatting and line . rstrip ( ) and not line . lstrip ( ) . startswith ( "#" ) : line += self . ln_comment ( ln ) except CoconutInternalException as err : complain ( err ) out . append ( line ) if add_one_to_ln : ln += 1 return "\n" . join ( out )
|
Add end-of-line comments.
|
7,798
|
def passthrough_repl ( self , inputstring , ** kwargs ) : out = [ ] index = None for c in append_it ( inputstring , None ) : try : if index is not None : if c is not None and c in nums : index += c elif c == unwrapper and index : ref = self . get_ref ( "passthrough" , index ) out . append ( ref ) index = None elif c != "\\" or index : out . append ( "\\" + index ) if c is not None : out . append ( c ) index = None elif c is not None : if c == "\\" : index = "" else : out . append ( c ) except CoconutInternalException as err : complain ( err ) if index is not None : out . append ( index ) index = None out . append ( c ) return "" . join ( out )
|
Add back passthroughs .
|
7,799
|
def str_repl ( self , inputstring , ** kwargs ) : out = [ ] comment = None string = None for i , c in enumerate ( append_it ( inputstring , None ) ) : try : if comment is not None : if c is not None and c in nums : comment += c elif c == unwrapper and comment : ref = self . get_ref ( "comment" , comment ) if out and not out [ - 1 ] . endswith ( "\n" ) : out [ - 1 ] = out [ - 1 ] . rstrip ( " " ) if not self . minify : out [ - 1 ] += " " out . append ( "#" + ref ) comment = None else : raise CoconutInternalException ( "invalid comment marker in" , getline ( i , inputstring ) ) elif string is not None : if c is not None and c in nums : string += c elif c == unwrapper and string : text , strchar = self . get_ref ( "str" , string ) out . append ( strchar + text + strchar ) string = None else : raise CoconutInternalException ( "invalid string marker in" , getline ( i , inputstring ) ) elif c is not None : if c == "#" : comment = "" elif c == strwrapper : string = "" else : out . append ( c ) except CoconutInternalException as err : complain ( err ) if comment is not None : out . append ( comment ) comment = None if string is not None : out . append ( string ) string = None out . append ( c ) return "" . join ( out )
|
Add back strings .
|