idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
7,900
def qteStartRecordingHook(self, msgObj):
    """Commence macro recording.

    Ignores the request if a recording is already in progress. Otherwise
    resets the recorded key sequence and connects the key-parsed and
    abort signals so key presses are captured until recording stops.
    """
    if self.qteRecording:
        self.qteMain.qteStatus('Macro recording already enabled')
        return
    self.qteRecording = True
    self.qteMain.qteStatus('Macro recording started')
    self.recorded_keysequence = QtmacsKeysequence()
    self.qteMain.qtesigKeyparsed.connect(self.qteKeyPress)
    self.qteMain.qtesigAbort.connect(self.qteStopRecordingHook)
Commence macro recording .
7,901
def qteStopRecordingHook(self, msgObj):
    """Stop macro recording.

    No-op when no recording is active; otherwise disconnects the signals
    attached by qteStartRecordingHook.
    """
    if self.qteRecording:
        self.qteRecording = False
        self.qteMain.qteStatus('Macro recording stopped')
        self.qteMain.qtesigKeyparsed.disconnect(self.qteKeyPress)
        self.qteMain.qtesigAbort.disconnect(self.qteStopRecordingHook)
Stop macro recording .
7,902
def qteReplayKeysequenceHook(self, msgObj):
    """Replay the recorded macro key sequence.

    Does nothing when no sequence was recorded or while a recording is
    still in progress (replaying would record itself).
    """
    if self.recorded_keysequence.toString() == '':
        return
    if self.qteRecording:
        return
    self.qteMain.qteEmulateKeypresses(self.recorded_keysequence)
Replay the macro sequence .
7,903
def qteKeyPress(self, msgObj):
    """Record the key presses parsed while the macro recorder is active.

    Aborts recording when the parsed macro is this macro itself (avoids
    recursion); otherwise reports and runs the parsed macro.
    """
    (srcObj, keysequence, macroName) = msgObj.data
    if macroName is None:
        return
    if macroName == self.qteMacroName():
        # NOTE(review): ``abort`` is declared with a ``msgObj`` parameter
        # but is called here without one -- confirm its signature.
        self.abort()
    else:
        msg = 'Executing macro {} through {}'
        msg = msg.format(macroName, self.qteMacroName())
        self.qteMain.qteStatus(msg)
        self.qteMain.qteRunMacro(macroName, srcObj, keysequence)
Record the key presses .
7,904
def abort(self, msgObj=None):
    """Disconnect all signals and re-enable macro processing.

    Fix: ``msgObj`` now defaults to None because qteKeyPress invokes this
    as ``self.abort()`` with no argument, which previously raised
    TypeError; signal connections passing a message object still work.
    """
    self.qteMain.qtesigKeyparsed.disconnect(self.qteKeyPress)
    self.qteMain.qtesigAbort.disconnect(self.abort)
    self.qteActive = False
    self.qteMain.qteEnableMacroProcessing()
Disconnect all signals and turn macro processing in the event handler back on .
7,905
def get_new_client(request_session=False):
    """Return a new ConciergeClient with credentials from the environment.

    Reads MS_ACCESS_KEY, MS_SECRET_KEY and MS_ASSOCIATION_ID (KeyError if
    any is unset). Optionally opens a session immediately.
    """
    from .client import ConciergeClient
    client = ConciergeClient(
        access_key=os.environ["MS_ACCESS_KEY"],
        secret_key=os.environ["MS_SECRET_KEY"],
        association_id=os.environ["MS_ASSOCIATION_ID"])
    if request_session:
        client.request_session()
    return client
Return a new ConciergeClient pulling secrets from the environment .
7,906
def submit_msql_object_query(object_query, client=None):
    """Submit *object_query* to MemberSuite; return a list of model objects.

    Creates a client (and session) on demand. Raises NoResultsError when
    the query matched nothing and ExecuteMSQLError when the service
    reports a failure.
    """
    client = client or get_new_client()
    if not client.session_id:
        client.request_session()
    result = client.execute_object_query(object_query)
    execute_msql_result = result["body"]["ExecuteMSQLResult"]
    membersuite_object_list = []
    if execute_msql_result["Success"]:
        result_value = execute_msql_result["ResultValue"]
        if result_value["ObjectSearchResult"]["Objects"]:
            # Multi-object result: wrap every returned MemberSuiteObject.
            membersuite_object_list = []
            for obj in (result_value["ObjectSearchResult"]["Objects"]["MemberSuiteObject"]):
                membersuite_object = membersuite_object_factory(obj)
                membersuite_object_list.append(membersuite_object)
        elif result_value["SingleObject"]["ClassType"]:
            # Single-object result.
            membersuite_object = membersuite_object_factory(
                execute_msql_result["ResultValue"]["SingleObject"])
            membersuite_object_list.append(membersuite_object)
        elif (result_value["ObjectSearchResult"]["Objects"] is None
              and result_value["SingleObject"]["ClassType"] is None):
            raise NoResultsError(result=execute_msql_result)
        return membersuite_object_list
    else:
        raise ExecuteMSQLError(result=execute_msql_result)
Submit object_query to MemberSuite returning . models . MemberSuiteObjects .
7,907
def value_for_key(membersuite_object_data, key):
    """Return the value stored under *key* in *membersuite_object_data*.

    The payload stores fields as {'Key': ..., 'Value': ...} dicts under
    Fields -> KeyValueOfstringanyType. Raises KeyError when absent;
    duplicate keys resolve to the last occurrence.
    """
    mapping = {}
    for pair in membersuite_object_data["Fields"]["KeyValueOfstringanyType"]:
        mapping[pair['Key']] = pair['Value']
    return mapping[key]
Return the value for key of membersuite_object_data .
7,908
def usesTime(self, fmt=None):
    """Return True if the format string uses the record creation time.

    Falls back to ``self._fmt`` when *fmt* is None; when *fmt* is a
    tuple/list of formats, only the first entry is inspected.

    Fix: replaced Python-2-only ``basestring`` with ``str`` (the file
    elsewhere uses Python-3 syntax, so ``basestring`` raised NameError).
    """
    if fmt is None:
        fmt = self._fmt
    if not isinstance(fmt, str):
        fmt = fmt[0]
    return fmt.find('%(asctime)') >= 0
Check if the format uses the creation time of the record .
7,909
def qteIsQtmacsWidget(widgetObj):
    """Determine whether *widgetObj* belongs to the Qtmacs widget hierarchy.

    Walks up the parent chain looking for the ``_qteAdmin`` marker
    attribute; a visited list guards against cycles in the chain.
    """
    if widgetObj is None:
        return False
    if hasattr(widgetObj, '_qteAdmin'):
        return True
    seen = [widgetObj]
    node = widgetObj.parent()
    while node not in seen:
        if hasattr(node, '_qteAdmin'):
            return True
        if node is None:
            return False
        seen.append(node)
        node = node.parent()
    return False
Determine if a widget is part of Qtmacs widget hierarchy .
7,910
def qteGetAppletFromWidget(widgetObj):
    """Return the parent applet of *widgetObj*, or None.

    Walks up the parent chain to the first widget carrying the
    ``_qteAdmin`` marker and returns its ``qteApplet``; a visited list
    guards against cycles.
    """
    if widgetObj is None:
        return None
    if hasattr(widgetObj, '_qteAdmin'):
        return widgetObj._qteAdmin.qteApplet
    seen = [widgetObj]
    node = widgetObj.parent()
    while node not in seen:
        if hasattr(node, '_qteAdmin'):
            return node._qteAdmin.qteApplet
        if node is None:
            return None
        seen.append(node)
        node = node.parent()
    return None
Return the parent applet of widgetObj .
7,911
def setHookName(self, name: str):
    """Mark this messenger as hook-based and record *name* as its target."""
    self.isHook = True
    self.messengerName = name
Specify that the message will be delivered with the hook name .
7,912
def setSignalName(self, name: str):
    """Mark this messenger as signal-based and record *name* as its target."""
    self.isHook = False
    self.messengerName = name
Specify that the message will be delivered with the signal name .
7,913
def qteSetKeyFilterPolicy(self, receiveBefore: bool = False,
                          useQtmacs: bool = None,
                          receiveAfter: bool = False):
    """Set how Qtmacs filters keyboard events for a particular widget.

    Stores whether the widget receives events before/after the Qtmacs
    parser and whether Qtmacs filters them at all.
    """
    self.filterKeyEvents = useQtmacs
    self.receiveBeforeQtmacsParser = receiveBefore
    self.receiveAfterQtmacsParser = receiveAfter
Set the policy on how Qtmacs filters keyboard events for a particular widgets .
7,914
def appendQKeyEvent(self, keyEvent: QtGui.QKeyEvent):
    """Append *keyEvent* to the key sequence represented by this object.

    Stores the original event plus its (modifiers, key) Qt-constant pair
    in the two parallel bookkeeping lists.
    """
    self.keylistKeyEvent.append(keyEvent)
    mod = keyEvent.modifiers()
    key = keyEvent.key()
    self.keylistQtConstants.append((int(mod), key))
Append another key to the key sequence represented by this object .
7,915
def qteInsertKey(self, keysequence: QtmacsKeysequence, macroName: str):
    """Insert *keysequence* into the key map and bind it to *macroName*.

    Walks/creates nested dictionaries for every prefix key and stores
    the macro name at the final key.
    """
    keyMap = self
    keysequence = keysequence.toQtKeylist()
    for key in keysequence[:-1]:
        if key not in keyMap:
            keyMap[key] = {}
        # A macro previously bound at this prefix is replaced by a
        # sub-map so the longer sequence can be stored.
        if not isinstance(keyMap[key], dict):
            keyMap[key] = {}
        keyMap = keyMap[key]
    keyMap[keysequence[-1]] = macroName
Insert a new key into the key map and associate it with a macro .
7,916
def qteRemoveKey(self, keysequence: QtmacsKeysequence):
    """Remove *keysequence* from this key map, pruning empty sub-maps.

    Silently returns when the sequence (or any prefix) is not present.
    """
    keyMap = self
    keyMapRef = keyMap
    keysequence = keysequence.toQtKeylist()
    # Descend to the sub-map holding the final key.
    for key in keysequence[:-1]:
        if key not in keyMap:
            return
        keyMap = keyMap[key]
    if keysequence[-1] not in keyMap:
        return
    else:
        keyMap.pop(keysequence[-1])
    # Walk back up from the root, deleting now-empty intermediate maps.
    keysequence = keysequence[:-1]
    while (len(keysequence)):
        keyMap = keyMapRef
        for key in keysequence[:-1]:
            keyMap = keyMap[key]
        if len(keyMap[key]):
            # Sub-map still holds other bindings -- stop pruning.
            return
        else:
            keyMap.pop(key)
        # NOTE(review): ``keysequence`` is not shortened inside this loop,
        # and ``key`` relies on the preceding for-loop's last value --
        # confirm the pruning terminates as intended for long sequences.
Remove keysequence from this key map .
7,917
def match(self, keysequence: QtmacsKeysequence):
    """Look up *keysequence* in this key map.

    Returns (macroName, True) for a complete match, (None, True) for a
    valid prefix, and (None, False) when the sequence is unknown.
    """
    node = self
    try:
        for key in keysequence.toQtKeylist():
            node = node[key]
    except KeyError:
        return (None, False)
    if isinstance(node, dict):
        return (None, True)
    return (node, True)
Look up the key sequence in key map .
7,918
def _qteGetLabelInstance(self):
    """Return a new QLabel child of this widget with the mode-bar colors.

    Fix: removed the unused local ``layout = self.layout()``.
    """
    label = QtGui.QLabel(self)
    style = 'QLabel { background-color : white; color : blue; }'
    label.setStyleSheet(style)
    return label
Return an instance of a QLabel with the correct color scheme .
7,919
def _qteUpdateLabelWidths(self):
    """Ensure all but the last mode label are only as wide as necessary.

    Detaches items from the layout, pins each label's min/max width to
    its text width, re-adds the labels in order, then lets the last
    label stretch via a very large maximum width.
    """
    layout = self.layout()
    # NOTE(review): removing items while indexing over the original count
    # shifts positions, so itemAt(ii) may skip/return None part-way --
    # presumably harmless because everything is re-added below; confirm.
    for ii in range(layout.count()):
        label = layout.itemAt(ii)
        layout.removeItem(label)
    for item in self._qteModeList:
        label = item[2]
        width = label.fontMetrics().size(0, str(item[1])).width()
        label.setMaximumWidth(width)
        label.setMinimumWidth(width)
        layout.addWidget(label)
    _, _, label = self._qteModeList[-1]
    label.setMaximumWidth(1600000)
Ensure all but the last QLabel are only as wide as necessary .
7,920
def qteGetMode(self, mode: str):
    """Return the (mode, value, label) tuple for *mode*, or None if absent."""
    return next((entry for entry in self._qteModeList if entry[0] == mode), None)
Return a tuple containing the mode its value and its associated QLabel instance .
7,921
def qteAddMode(self, mode: str, value):
    """Append *mode* with *value*, showing it on a freshly created label."""
    label = self._qteGetLabelInstance()
    label.setText(value)
    self._qteModeList.append((mode, value, label))
    self._qteUpdateLabelWidths()
Append label for mode and display value on it .
7,922
def qteChangeModeValue(self, mode: str, value):
    """Set *mode* to *value* and refresh its label; return True if found."""
    for pos, entry in enumerate(self._qteModeList):
        if entry[0] != mode:
            continue
        label = entry[2]
        label.setText(value)
        self._qteModeList[pos] = (mode, value, label)
        self._qteUpdateLabelWidths()
        return True
    return False
Change the value of mode to value .
7,923
def qteInsertMode(self, pos: int, mode: str, value):
    """Insert *mode* with *value* at list position *pos* with a new label."""
    label = self._qteGetLabelInstance()
    label.setText(value)
    self._qteModeList.insert(pos, (mode, value, label))
    self._qteUpdateLabelWidths()
Insert mode at position pos .
7,924
def qteRemoveMode(self, mode: str):
    """Remove *mode* and delete its label; return True when it was found."""
    for idx, item in enumerate(self._qteModeList):
        if item[0] == mode:
            self._qteModeList.remove(item)
            # Hide and schedule Qt-side deletion of the label widget.
            item[2].hide()
            item[2].deleteLater()
            self._qteUpdateLabelWidths()
            return True
    return False
Remove mode and associated label .
7,925
def _get_bases(type_):
    """Get the base and meta classes to use in creating a subclass.

    Fix: the detokenized original was syntactically invalid (class bodies
    consisting only of docstrings were stripped); reconstructed with
    docstring-only bodies so the probe/metaclass definitions are legal.
    """
    try:
        class _(type_):
            """Probe whether ``type_`` can be subclassed."""
        BaseClass = type_
    except TypeError:
        # ``type_`` is not subclassable (e.g. a final/builtin special type).
        BaseClass = object

    class MetaClass(_ValidationMeta, BaseClass.__class__):
        """Combine validation with the base class's own metaclass."""

    return BaseClass, MetaClass
Get the base and meta classes to use in creating a subclass .
7,926
def _instantiate ( class_ , type_ , __value , * args , ** kwargs ) : try : return class_ ( __value , * args , ** kwargs ) except TypeError : try : return type_ ( __value , * args , ** kwargs ) except Exception : return __value
Instantiate the object if possible .
7,927
def _get_fullname ( obj ) : if not hasattr ( obj , "__name__" ) : obj = obj . __class__ if obj . __module__ in ( "builtins" , "__builtin__" ) : return obj . __name__ return "{}.{}" . format ( obj . __module__ , obj . __name__ )
Get the full name of an object including the module .
7,928
def get(self, key, recursive=False, sorted=False, quorum=False, timeout=None):
    """Get the value of *key*; delegates to the underlying adapter."""
    return self.adapter.get(key, recursive=recursive, sorted=sorted,
                            quorum=quorum, timeout=timeout)
Gets a value of key .
7,929
def wait(self, key, index=0, recursive=False, sorted=False, quorum=False,
         timeout=None):
    """Wait until the node at *key* changes (adapter get with wait=True)."""
    return self.adapter.get(key, recursive=recursive, sorted=sorted,
                            quorum=quorum, wait=True, wait_index=index,
                            timeout=timeout)
Waits until a node changes .
7,930
def refresh(self, key, ttl, prev_value=None, prev_index=None, timeout=None):
    """Set only the TTL of *key* (refresh mode: waiters are not notified)."""
    return self.adapter.set(key, ttl=ttl, refresh=True,
                            prev_value=prev_value, prev_index=prev_index,
                            timeout=timeout)
Sets only a TTL of a key. The waiters don't receive notifications from this operation.
7,931
def create(self, key, value=None, dir=False, ttl=None, timeout=None):
    """Create a new key (fails if it already exists: prev_exist=False)."""
    return self.adapter.set(key, value, dir=dir, ttl=ttl,
                            prev_exist=False, timeout=timeout)
Creates a new key .
7,932
def update(self, key, value=None, dir=False, ttl=None, refresh=False,
           prev_value=None, prev_index=None, timeout=None):
    """Update an existing key (prev_exist=True), optionally conditionally."""
    return self.adapter.set(key, value, dir=dir, ttl=ttl, refresh=refresh,
                            prev_value=prev_value, prev_index=prev_index,
                            prev_exist=True, timeout=timeout)
Updates an existing key .
7,933
def append(self, key, value=None, dir=False, ttl=None, timeout=None):
    """Create an automatically increasing key inside directory *key*."""
    return self.adapter.append(key, value, dir=dir, ttl=ttl, timeout=timeout)
Creates a new automatically increasing key in the given directory key .
7,934
def delete(self, key, dir=False, recursive=False, prev_value=None,
           prev_index=None, timeout=None):
    """Delete *key*, optionally recursively or conditionally."""
    return self.adapter.delete(key, dir=dir, recursive=recursive,
                               prev_value=prev_value, prev_index=prev_index,
                               timeout=timeout)
Deletes a key .
7,935
def find(self, cell_designation,
         cell_filter=lambda x, c: 'c' in x and x['c'] == c):
    """Return the index of the first spike container whose meta matches.

    Returns None implicitly when no container passes *cell_filter*.
    """
    matches = [index for index, container in enumerate(self.spike_containers)
               if cell_filter(container.meta, cell_designation)]
    if len(matches) > 0:
        return matches[0]
finds spike containers in a multi spike containers collection
7,936
def len(self, resolution=1.0, units=None, conversion_function=convert_time,
        end_at_end=True):
    """Calculate the number of steps this dimension spans at *resolution*.

    The resolution is converted into the dimension's own units first.
    Discrete dimensions (units == '1') get one extra step so both
    endpoints are counted.
    """
    if units is not None:
        resolution = conversion_function(resolution, from_units=units,
                                         to_units=self.units)
    else:
        units = self.units
    if self.min is None:
        # Open lower bound: measure from zero.
        return int(self.max / resolution)
    if self.max is None:
        return 0
    if units != '1' and end_at_end:
        # Continuous dimension ending exactly at max.
        return int(np.ceil((self.max - self.min) / resolution))
    return int(np.ceil((self.max - self.min) / resolution) + 1)
Calculates the length of the Label Dimension from its minimum, maximum, and whether it is discrete.
7,937
def logspace(self, bins=None, units=None, conversion_function=convert_time,
             resolution=None, end_at_end=True):
    """Return logarithmically spaced positions; *bins* overrides *resolution*.

    If *bins* is already a list/ndarray it is returned unchanged.
    """
    if type(bins) in [list, np.ndarray]:
        return bins
    # NOTE(review): min/max are converted before the ``units is None``
    # fallback below, so conversion can be asked for to_units=None --
    # presumably convert_time treats None as "no conversion"; confirm.
    min = conversion_function(self.min, from_units=self.units, to_units=units)
    max = conversion_function(self.max, from_units=self.units, to_units=units)
    if units is None:
        units = self.units
    if resolution is None:
        resolution = 1.0
    if bins is None:
        bins = self.len(resolution=resolution, units=units,
                        conversion_function=conversion_function)
    if units != '1' and end_at_end:
        # Generate one extra point and drop the last so the space ends
        # strictly before ``max``.
        return np.logspace(np.log10(min), np.log10(max), bins + 1)[:-1]
    return np.logspace(np.log10(min), np.log10(max), bins)
bins overwrites resolution
7,938
def constraint_range_dict(self, *args, **kwargs):
    """Create one constraint dict per bin of this dimension.

    Each dict carries '<name>__gte' and '<name>__lt' keys delimiting one
    bin of ``self.bins(...)``.

    Fix: removed the unreachable legacy implementation (a second return
    based on ``self.space``) that followed the first return statement.
    """
    bins = self.bins(*args, **kwargs)
    return [{self.name + '__gte': lo, self.name + '__lt': hi}
            for lo, hi in zip(bins[:-1], bins[1:])]
Creates a list of dictionaries which each give a constraint for a certain section of the dimension .
7,939
def find_labels(self, key, find_in_name=True, find_in_units=False):
    """Return the indices of labels matching *key*.

    *key* may be: a string (exact match, or substring when prefixed with
    '~'), a callable predicate over the label, or an int index (validated
    against the matrix width). Anything else is wrapped as [key].
    """
    if type(key) is str:
        found_keys = []
        if key.startswith('~'):
            # Substring search over names and/or units.
            for label_no, label in enumerate(self.labels):
                if find_in_name and key[1:] in label.name:
                    found_keys.append(label_no)
                if find_in_units and key[1:] in label.units:
                    found_keys.append(label_no)
        else:
            # Exact match: first hit wins.
            for label_no, label in enumerate(self.labels):
                if find_in_name and key == label.name:
                    return [label_no]
                if find_in_units and key == label.units:
                    return [label_no]
        return found_keys
    if hasattr(key, '__call__'):
        found_keys = []
        for label_no, label in enumerate(self.labels):
            if key(label):
                found_keys.append(label_no)
        return found_keys
    if type(key) is int:
        # Validate against the number of matrix columns.
        return [key] if key < self.matrix.shape[1] else []
    return [key]
Takes a string or a function to find a set of label indices that match. If the string starts with a ~ the label only has to contain the string.
7,940
def convert(self, label, units=None, conversion_function=convert_time):
    """Return a new LabeledMatrix with one dimension converted to *units*.

    The original matrix is copied, so this object is left untouched.
    """
    label_no = self.get_label_no(label)
    new_label, new_column = self.get_converted(label_no, units,
                                               conversion_function)
    labels = [LabelDimension(l) for l in self.labels]
    labels[label_no] = new_label
    matrix = self.matrix.copy()
    matrix[:, label_no] = new_column
    return LabeledMatrix(matrix, labels)
converts a dimension in place
7,941
def _get_constrained_labels(self, remove_dimensions=False, **kwargs):
    """Return label copies with min/max updated per Django-style kwargs.

    Supported suffixes: '', '__lt', '__lte', '__gt', '__gte', '__evals'.
    A bare name pins min == max; with remove_dimensions=True, pinned or
    '__evals' dimensions are dropped entirely.
    """
    new_labels = []
    for label_no, label in enumerate(self.labels):
        new_label = LabelDimension(label)
        remove = False
        for k in kwargs:
            if k == label.name:
                # Exact value: collapse the dimension to a point.
                new_label.max = kwargs[k]
                new_label.min = kwargs[k]
                remove = True
            if k == label.name + '__lt':
                # Discrete dimensions exclude the bound by stepping one.
                if new_label.units == '1':
                    new_label.max = np.min([new_label.max, kwargs[k] - 1])
                else:
                    new_label.max = np.min([new_label.max, kwargs[k]])
            if k == label.name + '__lte':
                new_label.max = np.min([new_label.max, kwargs[k]])
            if k == label.name + '__gt':
                if new_label.units == '1':
                    new_label.min = np.max([new_label.min, kwargs[k] + 1])
                else:
                    new_label.min = np.max([new_label.min, kwargs[k]])
            if k == label.name + '__gte':
                new_label.min = np.max([new_label.min, kwargs[k]])
            if k == label.name + '__evals':
                remove = True
        if remove_dimensions:
            if remove:
                continue
        new_labels.append(new_label)
    return new_labels
returns labels which have updated minima and maxima depending on the kwargs supplied to this
7,942
def store_meta(self, meta):
    """Inplace method that adds meta information to the meta dictionary.

    Initialises ``self.meta`` to an empty dict on first use and returns
    ``self`` for chaining.
    """
    if self.meta is None:
        self.meta = {}
    self.meta.update(meta)
    return self
Inplace method that adds meta information to the meta dictionary
7,943
def find(self, cell_designation,
         cell_filter=lambda x, c: 'c' in x and x['c'] == c):
    """Delegate the spike-container search to the recorded parent.

    Returns (parent, parent.find(...)) when ``self.meta`` holds a
    'parent' entry; otherwise returns None implicitly.
    """
    if 'parent' not in self.meta:
        return None
    parent = self.meta['parent']
    return (parent, parent.find(cell_designation, cell_filter=cell_filter))
finds spike containers in multi spike containers collection offspring
7,944
def ISIs(self, time_dimension=0, units=None, min_t=None, max_t=None):
    """Return the inter-spike intervals within the (min_t, max_t) window."""
    units = self._default_units(units)
    converted_dimension, st = self.spike_times.get_converted(time_dimension,
                                                             units)
    if min_t is None:
        min_t = converted_dimension.min
    if max_t is None:
        max_t = converted_dimension.max
    # Boolean multiplication acts as an elementwise logical AND mask.
    return np.diff(sorted(st[(st > min_t) * (st < max_t)]))
returns the Inter Spike Intervals
7,945
def temporal_firing_rate(self, time_dimension=0, resolution=1.0, units=None,
                         min_t=None, max_t=None, weight_function=None,
                         normalize_time=False, normalize_n=False,
                         start_units_with_0=True, cell_dimension='N'):
    """Return a time histogram of spikes as (counts, bin_edges).

    Counts may be normalized per second (normalize_time) and/or by the
    number of distinct cells (normalize_n).

    NOTE(review): ``weight_function``, ``start_units_with_0`` and the
    non-'spike_times' data formats are not handled in this body --
    presumably covered elsewhere; confirm.
    """
    units = self._default_units(units)
    if self.data_format == 'spike_times':
        converted_dimension, st = self.spike_times.get_converted(0, units)
        if min_t is None:
            min_t = converted_dimension.min
        if max_t is None:
            max_t = converted_dimension.max
        st = st[(st >= min_t) * (st < max_t)]
        bins = converted_dimension.linspace_by_resolution(resolution,
                                                          end_at_end=True,
                                                          extra_bins=0)
        H, edg = np.histogram(st, bins=bins)
        if normalize_time:
            # Convert the bin width to seconds so H becomes spikes/second.
            H = H / (convert_time(resolution, from_units=units, to_units='s'))
        if normalize_n:
            H = H / (len(np.unique(self.spike_times[cell_dimension])))
        return H, edg
Outputs a time histogram of spikes .
7,946
def plot_temporal_firing_rate(self, time_dimension=0, resolution=1.0,
                              units=None, min_t=None, max_t=None,
                              weight_function=None, normalize_time=False,
                              normalize_n=False, start_units_with_0=True,
                              cell_dimension='N', **kwargs):
    """Plot the temporal firing rate; extra kwargs go to plt.plot.

    Does nothing when the container is falsy (presumably empty).
    """
    if bool(self):
        import matplotlib.pylab as plt
        H, ed = self.temporal_firing_rate(
            time_dimension=time_dimension, resolution=resolution,
            units=units, min_t=min_t, max_t=max_t,
            weight_function=weight_function, normalize_time=normalize_time,
            normalize_n=normalize_n, start_units_with_0=start_units_with_0,
            cell_dimension=cell_dimension)
        # Plot counts against the right edge of each bin.
        plt.plot(ed[1:], H, **kwargs)
Plots a firing rate plot .
7,947
def get_units(self, *args, **kwargs):
    """Return the units of one dimension (scalar) or several (list)."""
    units = [self.spike_times.get_label(arg).units for arg in args]
    return units[0] if len(args) == 1 else units
Returns the units of a Dimension
7,948
def get_min(self, *args, **kwargs):
    """Return the minimum of one dimension, or a list of minima.

    Fix: the multi-argument branch previously returned ``.max`` for each
    dimension (copy-paste from get_max); it now returns ``.min``.
    """
    if len(args) == 1:
        return self.spike_times.get_label(args[0]).min
    return [self.spike_times.get_label(a).min for a in args]
Returns the minimum of a Dimension
7,949
def get_max(self, *args, **kwargs):
    """Return the maximum of one dimension, or a list of maxima."""
    maxima = [self.spike_times.get_label(a).max for a in args]
    return maxima[0] if len(args) == 1 else maxima
Returns the maximum of a Dimension
7,950
def linspace_bins(self, dim, *args, **kwargs):
    """Forward to the label's linspace_bins (shifted edges for histograms)."""
    label = self.spike_times.get_label(dim)
    return label.linspace_bins(*args, **kwargs)
Like linspace but shifts the space to create edges for histograms .
7,951
def create_SpikeGeneratorGroup(self, time_label=0, index_label=1,
                               reorder_indices=False, index_offset=True):
    """Create a brian2 SpikeGeneratorGroup holding this container's spikes.

    index_offset=True shifts indices so the smallest becomes 0 (or
    subtracts a numeric offset); reorder_indices remaps them to a dense
    0..N-1 range.
    """
    import brian2
    spike_times = self.spike_times.convert(time_label, 's')[time_label] * brian2.second
    indices = [0] * len(spike_times)
    if len(self.spike_times.find_labels(index_label)):
        indices = self.spike_times[index_label]
        if index_offset is not False:
            if index_offset is True:
                # Shift so the smallest index becomes zero.
                indices = indices - self.spike_times.get_label(index_label).min
            else:
                indices = indices - index_offset
        N = np.max(indices)
    else:
        # No index dimension recorded: all spikes belong to neuron 0.
        N = self.spike_times.get_label(index_label).max
    if reorder_indices:
        # Remap arbitrary index values onto a dense 0..N-1 range.
        indices_levels = np.sort(np.unique(indices)).tolist()
        indices = np.array([indices_levels.index(i) for i in indices])
        N = len(indices_levels)
    return brian2.SpikeGeneratorGroup(N + 1, indices=indices,
                                      times=spike_times)
Creates a brian 2 create_SpikeGeneratorGroup object that contains the spikes in this container .
7,952
def to_neo(self, index_label='N', time_label=0,
           name='segment of exported spikes', index=0):
    """Return a neo Segment containing one SpikeTrain per *index_label*."""
    import neo
    from quantities import s
    seq = neo.Segment(name=name, index=index)
    t_start = None
    t_stop = None
    if self.min_t is not None:
        t_start = convert_time(self.min_t, from_units=self.units,
                               to_units='s') * s
    if self.max_t is not None:
        t_stop = convert_time(self.max_t, from_units=self.units,
                              to_units='s') * s
    for train in self.generate(index_label):
        seq.spiketrains.append(neo.SpikeTrain(
            train.spike_times.get_converted(time_label, 's')[1] * s,
            t_start=t_start, t_stop=t_stop))
    return seq
Returns a neo Segment containing the spike trains .
7,953
def qteModificationChanged(self, mod):
    """Show '*' in the mode bar when modified, '-' otherwise."""
    flag = '*' if mod else '-'
    self._qteModeBar.qteChangeModeValue('MODIFIED', flag)
Update the modification status in the mode bar .
7,954
def loadFile(self, fileName):
    """Display the contents of *fileName* in the Scintilla widget.

    Logs an info message when the file does not exist.

    Fix: the file handle from ``open(fileName).read()`` was never
    closed; it is now closed deterministically via ``with``.
    """
    self.fileName = fileName
    self.file = QtCore.QFile(fileName)
    if self.file.exists():
        with open(fileName) as handle:
            self.qteScintilla.setText(handle.read())
        self.qteScintilla.qteUndoStack.reset()
    else:
        # NOTE(review): the message interpolates the applet ID rather
        # than the file name -- confirm that is intentional.
        msg = "File <b>{}</b> does not exist".format(self.qteAppletID())
        self.qteLogger.info(msg)
Display the file fileName .
7,955
def conv(arg, default=None, func=None):
    """Generalization of ``arg if arg else default``.

    Falsy *arg* yields *default*; truthy *arg* is returned as-is or, when
    *func* is given, transformed by it.
    """
    if not arg:
        return default
    return func(arg) if func else arg
essentially the generalization of arg if arg else default
7,956
def dump_pickle(name, obj):
    """Pickle *obj* to file *name* using protocol 2 (similar to np.save)."""
    with open(name, "wb") as handle:
        pickle.dump(obj, handle, 2)
quick pickle dump similar to np . save
7,957
def chunks(l, n):
    """Split sequence *l* into successive n-sized slices (last may be short)."""
    starts = range(0, len(l), n)
    return [l[start:start + n] for start in starts]
chunk l in n sized bits
7,958
def check_vprint(s, vprinter):
    """Checked verbose printing: plain print when *vprinter* is True,
    delegate when it is callable, otherwise do nothing."""
    if vprinter is True:
        print(s)
    elif callable(vprinter):
        vprinter(s)
checked verbose printing
7,959
def filelines(fname, strip=False):
    """Read all lines of *fname*; optionally strip whitespace from each."""
    with open(fname, 'r') as handle:
        lines = handle.readlines()
    if strip:
        return [line.strip() for line in lines]
    return lines
read lines from a file into lines ... optional strip
7,960
def parse_utuple(s, urx, length=2):
    """Match *s* as a parenthesized tuple whose items match regex *urx*.

    *urx* may be a string or a compiled pattern; *length* of None accepts
    any arity, otherwise exactly *length* items (a trailing comma is
    tolerated). Returns the re.Match or None.
    """
    if type(urx) != str:
        urx = urx.pattern
    if length is not None and length < 1:
        raise ValueError("invalid length: {}".format(length))
    if length == 1:
        pattern = r"^ *\( *{urx} *,? *\) *$".format(urx=urx)
    elif length is None:
        pattern = r"^ *\( *(?:{urx} *, *)*{urx} *,? *\) *$".format(urx=urx)
    else:
        pattern = r"^ *\( *(?:{urx} *, *){{{rep1}}}{urx} *,? *\) *$".format(
            rep1=length - 1, urx=urx)
    return re.match(pattern, s)
parse a string into a list of a uniform type
7,961
def parse_numtuple(s, intype, length=2, scale=1):
    """Parse *s* as a tuple of numbers of type *intype*, scaled by *scale*.

    Only int and float are supported; raises ValueError on a malformed
    tuple string.
    """
    if intype == int:
        numrx = intrx_s
    elif intype == float:
        numrx = fltrx_s
    else:
        raise NotImplementedError("Not implemented for type: {}".format(intype))
    if parse_utuple(s, numrx, length=length) is None:
        raise ValueError("{} is not a valid number tuple.".format(s))
    return [element * scale for element in evalt(s)]
parse a string into a list of numbers of a type
7,962
def parse_ctuple(s, length=2):
    """Parse *s* as a tuple of matplotlib colors (names or rgb triples).

    Raises ValueError when *s* does not match the color-tuple grammar.
    """
    if parse_utuple(s, colrx_s, length=length) is None:
        raise ValueError("{} is not a valid color tuple.".format(s))
    quoted = quote_subs(s, colorfix=True)
    return evalt(quoted)
parse a string of acceptable colors into matplotlib that is either strings or three tuples of rgb . Don t quote strings .
7,963
def parse_stuple(s, length=2):
    """Parse *s* as a tuple of unquoted strings.

    Raises ValueError when *s* does not match the string-tuple grammar.
    """
    if parse_utuple(s, isrx_s, length=length) is None:
        raise ValueError("{} is not a valid string tuple.".format(s))
    quoted = quote_subs(s)
    return evalt(quoted)
parse a string of strings . Don t quote strings
7,964
def parse_colors(s, length=1):
    """Parse *s* as a single matplotlib color or a tuple of colors.

    Always returns a list of colors, whichever form the input took.
    """
    if length and length > 1:
        return parse_ctuple(s, length=length)
    if re.match('^ *{} *$'.format(isrx_s), s):
        return [s]
    if re.match('^ *{} *$'.format(rgbrx_s), s):
        # NOTE: eval on the input string -- never pass untrusted data.
        return [eval(s)]
    return parse_ctuple(s, length=length)
helper for parsing a string that can be either a matplotlib color or be a tuple of colors . Returns a tuple of them either way .
7,965
def parse_qs(s, rx, parsef=None, length=2, quote=False):
    """Parse *s* either as a tuple of *rx*-matching items or a single item.

    A parenthesized *s* is parsed via *parsef* when given, otherwise with
    parse_utuple/evalt; a bare value matching *rx* is wrapped in a
    one-element list. *quote* wraps string items before evaluation.
    Raises ValueError when nothing matches.
    """
    if type(rx) != str:
        rx = rx.pattern
    if re.match(" *\(.*\)", s):
        if not parsef:
            if parse_utuple(s, rx, length=length):
                if quote:
                    s = quote_subs(s)
                # NOTE: evalt ultimately evaluates the string -- never
                # feed untrusted input through this parser.
                return evalt(s)
            else:
                raise ValueError("{} did is not a valid tuple of {}".format(s, rx))
        else:
            return parsef(s, length=length)
    elif re.match('^ *{} *$'.format(rx), s):
        if quote:
            return eval('["{}"]'.format(s))
        return eval('[{}]'.format(s))
    else:
        raise ValueError("{} does not match '{}' or the passed parsef".format(s, rx))
helper for parsing a string that can match either rx or parsef, which is ostensibly the parsef for rx.
7,966
def sd(d, **kw):
    """Return a shallow copy of mapping *d* updated with keyword overrides.

    A hack for deriving a modified dict dynamically without mutating the
    original.
    """
    merged = dict(d)
    merged.update(kw)
    return merged
A hack to return a modified dict dynamically . Basically Does classless OOP as in js but with dicts although not really for the verb parts of OOP but more of the subject stuff .
7,967
def mk_getkw(kw, defaults, prefer_passed=False):
    """Build a keyword-getter closing over *kw* with fallback *defaults*.

    The default getter uses the module-level ``test`` predicate to decide
    whether a key counts as present; with prefer_passed=True a plain
    membership test is used instead. One requested key returns a scalar,
    several return a list.
    """
    def getkw(*keys):
        vals = [kw[k] if test(kw, k) else defaults[k] for k in keys]
        return vals[0] if len(vals) == 1 else vals

    def getkw_prefer_passed(*keys):
        vals = [kw[k] if k in kw else defaults[k] for k in keys]
        return vals[0] if len(vals) == 1 else vals

    return getkw_prefer_passed if prefer_passed else getkw
a helper for generating a function for reading keywords in interface functions with a dictionary with defaults
7,968
def _load_resource(self):
    """Load this resource's data from the server and populate its fields.

    Raises NotFoundException on 404 and HTTPException on any other
    non-2xx/3xx status.
    """
    url = self._url
    if self._params:
        url += '?{0}'.format(six.moves.urllib_parse.urlencode(self._params))
    # Dispatch on the configured HTTP verb, e.g. ``session.get(url)``.
    r = getattr(self._session, self._meta.get_method.lower())(url)
    if r.status_code == 404:
        raise NotFoundException('Server returned 404 Not Found for the URL {0}'.format(self._url))
    elif not 200 <= r.status_code < 400:
        raise HTTPException('Server returned {0} ({1})'.format(r.status_code, r.reason), r)
    data = self._meta.deserializer.to_dict(r.text)
    self.populate_field_values(data)
Load resource data from server
7,969
def populate_field_values(self, data):
    """Populate this resource's fields from the *data* dict.

    Key matching can be case-insensitive and/or "fuzzy" (alphanumeric
    characters only, lowercased). Missing required fields raise
    MissingFieldException in strict mode, otherwise log a warning;
    defaults are (shallow-)copied in when present.
    """
    if not self._meta.case_sensitive_fields:
        data = {k.lower(): v for k, v in six.iteritems(data)}
    if self._meta.match_fuzzy_keys:
        # Reduce keys to lowercase alphanumerics for fuzzy matching.
        data = {''.join(x for x in k if x in ALPHANUMERIC).lower(): v
                for k, v in six.iteritems(data)}
    for field in self._meta.fields:
        name = field.name if self._meta.case_sensitive_fields else field.name.lower()
        value = None
        if self._meta.match_fuzzy_keys:
            name = ''.join(x for x in name if x in ALPHANUMERIC).lower()
        if name in data:
            value = field.to_python(data[name], self)
        elif field.required and field.default is None:
            message = "Response from {0} is missing required field '{1}'".format(self._url, field.name)
            if self._strict:
                raise MissingFieldException(message)
            else:
                # NOTE(review): logger.warn is deprecated in the stdlib in
                # favor of logger.warning.
                logger.warn(message)
        elif field.default is not None:
            # Copy so mutable defaults are not shared between instances.
            value = copy.copy(field.default)
        setattr(self, field._attr_name, value)
    self._populated_field_values = True
Load resource data and populate field values
7,970
def close_thread(self):
    """Clear descriptors for a stopped task; the task must be joined first.

    Raises WThreadJoiningTimeoutError when the thread is still running.
    """
    thread = self.__thread
    if thread is not None and thread.is_alive() is True:
        raise WThreadJoiningTimeoutError('Thread is still alive. Thread name: %s' % thread.name)
    self.start_event().clear()
    self.__thread = None
Clear all object descriptors for stopped task . Task must be joined prior to calling this method .
7,971
def trace_module(no_print=True):
    """Trace my_module exceptions by running its pytest suite under ExDocCxt.

    When *no_print* is False, also prints the collected sphinx docs for a
    couple of callables. Returns a copy of the ExDoc object.
    """
    pwd = os.path.dirname(__file__)
    script_name = os.path.join(pwd, "test_my_module.py")
    with pexdoc.ExDocCxt() as exdoc_obj:
        # pytest.main returns a nonzero exit code on failure.
        if pytest.main(["-s", "-vv", "-x", "{0}".format(script_name)]):
            raise RuntimeError("Tracing did not complete successfully")
    if not no_print:
        module_prefix = "docs.support.my_module."
        callable_names = ["func", "MyClass.value"]
        for callable_name in callable_names:
            callable_name = module_prefix + callable_name
            print("\nCallable: {0}".format(callable_name))
            print(exdoc_obj.get_sphinx_doc(callable_name, width=70))
            print("\n")
    return copy.copy(exdoc_obj)
Trace my_module exceptions .
7,972
def header_name_check(header_name):
    """Check *header_name* for validity; return True when it is valid.

    The name must be non-empty and match the class-level header-name
    regular expression after us-ascii encoding.
    """
    match = WHTTPHeaders.header_name_re.match(header_name.encode('us-ascii'))
    return len(header_name) > 0 and match is not None
Check header name for validity . Return True if name is valid
7,973
def remove_headers(self, header_name):
    """Remove the header *header_name* (normalized); no-op when absent.

    Raises RuntimeError on a read-only object.
    """
    if self.__ro_flag:
        raise RuntimeError('ro')
    name = self.normalize_name(header_name)
    self.__headers.pop(name, None)
Remove header by its name
7,974
def add_headers(self, header_name, value, *values):
    """Append one or more values under the normalized *header_name*.

    Raises RuntimeError on a read-only object.
    """
    if self.__ro_flag:
        raise RuntimeError('ro')
    name = self.normalize_name(header_name)
    bucket = self.__headers.setdefault(name, [])
    bucket.append(value)
    bucket.extend(values)
Add new header
7,975
def get_headers(self, header_name):
    """Return a tuple of values for *header_name*, or None when unset."""
    name = self.normalize_name(header_name)
    values = self.__headers.get(name)
    return tuple(values) if values is not None else None
Return header value by its name
7,976
def switch_name_style(self, http_protocol_version):
    """Return a copy whose header names follow the given protocol version.

    Headers and cookies are copied into a fresh WHTTPHeaders whose
    normalization mode is set to *http_protocol_version*.
    """
    new_headers = WHTTPHeaders()
    new_headers.__normalization_mode = http_protocol_version
    names = self.headers()
    for name in names:
        new_headers.add_headers(name, *self.get_headers(name))
    for cookie_name in self.__set_cookies.cookies():
        new_headers.__set_cookies.add_cookie(self.__set_cookies[cookie_name].copy())
    return new_headers
Return object copy with header names saved as it is described in the given protocol version
7,977
def ro(self):
    """Return a read-only copy of this object.

    Headers are copied one by one, the cookie jar is copied via its own
    ro() method, and the read-only flag is set on the copy.
    """
    ro_headers = WHTTPHeaders()
    names = self.headers()
    for name in names:
        ro_headers.add_headers(name, *self.get_headers(name))
    # NOTE(review): this assigns __cookies while sibling methods use
    # __set_cookies -- looks like a typo that would drop cookies from the
    # read-only copy; verify against the class definition.
    ro_headers.__cookies = self.__set_cookies.ro()
    ro_headers.__ro_flag = True
    return ro_headers
Return read - only copy of this object
7,978
def client_cookie_jar(self):
    """Build a read-only cookie jar from this object's 'Cookie' headers."""
    jar = WHTTPCookieJar()
    # get_headers returns None when no 'Cookie' header is present
    for header_value in (self.get_headers('Cookie') or tuple()):
        # one Cookie header value may carry several cookies
        for cookie in WHTTPCookieJar.import_header_text(header_value):
            jar.add_cookie(cookie)
    return jar.ro()
Return internal cookie jar that must be used as HTTP - request cookies
7,979
def import_headers(cls, http_code):
    """Create a WHTTPHeaders instance from raw header text.

    Set-Cookie headers, if any, are parsed into the internal cookie jar
    and then removed from the resulting header collection.
    """
    result = WHTTPHeaders()
    parsed = email.message_from_file(StringIO(http_code))
    for name, value in parsed.items():
        result.add_headers(name, value)
    set_cookie_values = result.get_headers('Set-Cookie')
    if set_cookie_values is not None:
        for raw_value in set_cookie_values:
            for cookie in WHTTPCookieJar.import_header_text(raw_value):
                result.set_cookie_jar().add_cookie(cookie)
        result.remove_headers('Set-Cookie')
    return result
Create WHTTPHeaders by the given code . If code has Set - Cookie headers that headers are parsed data are stored in internal cookie jar . At the end of parsing Set - Cookie headers are removed from the result
7,980
def trace_module(no_print=True):
    """Trace my_module exceptions by exercising its API directly.

    Calls func() and the MyClass.value property inside a pexdoc
    exception-tracing context and returns a shallow copy of the
    resulting ExDoc object.  When *no_print* is False, the collected
    Sphinx documentation for the traced callables is printed.

    :raises RuntimeError: if tracing fails for any reason.
    """
    with pexdoc.ExDocCxt() as exdoc_obj:
        # BUGFIX: a bare `except:` also caught SystemExit/KeyboardInterrupt
        # and discarded the original exception; catch Exception and chain
        # the cause so the underlying failure remains visible.
        try:
            docs.support.my_module.func("John")
            obj = docs.support.my_module.MyClass()
            obj.value = 5
            obj.value
        except Exception as exc:
            raise RuntimeError("Tracing did not complete successfully") from exc
    if not no_print:
        module_prefix = "docs.support.my_module."
        callable_names = ["func", "MyClass.value"]
        for callable_name in callable_names:
            callable_name = module_prefix + callable_name
            print("\nCallable: {0}".format(callable_name))
            print(exdoc_obj.get_sphinx_doc(callable_name, width=70))
            print("\n")
    return copy.copy(exdoc_obj)
Trace my_module_original exceptions .
7,981
def ro(self):
    """Create a read-only copy of this request."""
    frozen_headers = self.headers().ro()
    copy_request = WWebRequest(
        self.session(), self.method(), self.path(),
        headers=frozen_headers, request_data=self.request_data()
    )
    copy_request.__ro_flag = True
    return copy_request
Create read - only copy
7,982
def simple_contact(request, username=""):
    """Simple contact form view.

    Sends mail to a site member (looked up by *username* from the URL),
    or -- when no username is given -- to settings.DEFAULT_CONTACTS,
    falling back to all superusers.
    """
    site = Site.objects.get_current()
    form = ContactForm(request.POST or None)
    UserModel = get_user_model()
    recipients = []
    site_form = False
    logger.debug('Recipients should be empty: %s' % recipients)
    if request.user.is_authenticated:
        try:
            name = request.user.display_name
        except AttributeError:
            name = request.user.username
        form.fields['sender_name'].widget.attrs['readonly'] = 'true'
        form.fields['sender_name'].initial = name
        form.fields['sender_email'].widget.attrs['readonly'] = 'true'
        form.fields['sender_email'].initial = request.user.email
    if username:
        member = get_object_or_404(UserModel, username=username)
        recipients = [member.email, ]
        logger.debug('Recipients should be a single user: %s' % recipients)
    else:
        site_form = True
        member = None
        recipients = getattr(settings, "DEFAULT_CONTACTS", None)
        logger.debug('Recipients should be match DEFAULT_CONTACTS: %s' % recipients)
        if not recipients:
            recipients = UserModel.objects.filter(is_superuser=True).values_list('email', flat=True)
            warnings.warn(
                "settings.DEFAULT_CONTACTS does not exist. You may want to create it.",
                RuntimeWarning)
            logger.debug('Recipients should be superusers: %s' % recipients)
        # BUGFIX: DEFAULT_CONTACTS may be a tuple and values_list() returns a
        # QuerySet; neither supports the .append() below, so normalize to list.
        recipients = list(recipients)
    if form.is_valid():
        if site_form:
            subject = "A {} contact form submission from {}".format(
                site.name, form.cleaned_data['sender_name'])
        else:
            subject = "A message from {} on {}".format(
                form.cleaned_data['sender_name'], site.name)
        body = form.cleaned_data['body']
        sender_email = form.cleaned_data['sender_email']
        if 'send_a_copy' in request.POST:
            recipients.append(sender_email)
            logger.debug('Recipients should be match prior + sender email: %s' % recipients)
        mail = EmailMessage(subject=subject, body=body,
                            from_email=sender_email, to=recipients)
        mail.send()
        # NOTE(review): success_url is not defined in this view -- presumably
        # a module-level constant; verify it exists at module scope.
        return HttpResponseRedirect(success_url)
    return render(request, 'contact/simple_form.html',
                  {'form': form, 'site': site, 'member': member})
Defines simple contact form that can be used to contact a site member passed by username in the URL or to all superusers or to a list defined in settings . DEFAULT_CONTACTS .
7,983
def build_contact(request, slug=""):
    """Build the contact form configured by the ContactFormController
    identified by *slug*, and handle its submission.

    Depending on the controller's options the submission is stored in
    the database and/or e-mailed to the configured recipients.
    """
    controller = get_object_or_404(ContactFormController, slug=slug)
    site = Site.objects.get_current()
    UserModel = get_user_model()
    user = request.user
    form = ContactForm(request.POST or None, request.FILES or None, controller=controller)
    if user.is_authenticated:
        # pre-fill and lock the sender fields for logged-in users
        try:
            name = user.display_name
        except AttributeError:
            name = user.username
        form.fields['sender_name'].widget.attrs['readonly'] = 'true'
        form.fields['sender_name'].initial = name
        form.fields['sender_email'].widget.attrs['readonly'] = 'true'
        form.fields['sender_email'].initial = user.email
    if form.is_valid():
        if controller.store_in_db:
            # persist the message; the controller may force a fixed subject
            new_msg = Contact(**form.cleaned_data)
            new_msg.controller = controller
            new_msg.site = site
            if controller.override_subject:
                new_msg.subject = controller.override_subject
            new_msg.save()
        if controller.send_emails:
            form_data = form.cleaned_data
            # subject precedence: controller override > form field > default
            if controller.override_subject:
                subject = controller.override_subject
            elif 'subject' in form_data:
                subject = form_data['subject']
            else:
                subject = "{} message from {}".format(controller.name, form_data['sender_name'])
            body = "{} \n\n {}".format(form_data['body'], form_data['sender_name'])
            if controller.request_contact_info:
                body += "\nAddress: {} \nCity: {} \nState: {} \nPhone: {}".format(
                    form_data['contact_address'], form_data['contact_city'],
                    form_data['contact_state'], form_data['contact_phone'])
            # NOTE(review): `to` is only bound when email_options is '1' or
            # '2'; any other value raises NameError at EmailMessage below --
            # confirm the model restricts email_options to these choices.
            if controller.email_options == '2':
                # the 'to' form value may be a username or a raw address
                try:
                    to = [UserModel.objects.get(username=form.cleaned_data['to']).email]
                except Exception:
                    to = [form.cleaned_data['to']]
            if controller.email_options == '1':
                to = [r.email for r in controller.recipients.all()]
                for r in controller.other_recipients.all():
                    to.append(r.email)
            if 'send_a_copy' in form.cleaned_data:
                to.append(form.cleaned_data['sender_email'])
            mail = EmailMessage(subject=subject, body=body,
                                from_email=form.cleaned_data['sender_email'], to=to)
            if 'photo' in request.FILES:
                photo = request.FILES['photo']
                mail.attach(photo.name, photo.read(), photo.content_type)
            mail.send()
        # NOTE(review): 'success_url' is passed as a literal template name --
        # verify a template by that name exists (it reads like a placeholder).
        return render(request, 'success_url', {'controller': controller})
    return render(request, 'contact/form.html',
                  {'form': form, 'site': site, 'controller': controller})
Builds appropriate contact form based on options set in the contact_form controller .
7,984
def pale_webapp2_request_handler_generator(pale_endpoint):
    """Generate a webapp2.RequestHandler class for the pale endpoint."""
    def pale_handler(self, *args, **kwargs):
        # CORS preflight: answer OPTIONS directly, without the endpoint
        if self.request.method == "OPTIONS":
            cors_headers = (
                ('Access-Control-Allow-Origin', self.request.headers.get("Origin", None)),
                ('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept'),
                ('Access-Control-Allow-Methods', 'POST, GET, PUT, DELETE'),
                ('Access-Control-Allow-Credentials', 'true'),
            )
            for header_name, header_value in cors_headers:
                self.response.headers[header_name] = header_value
            return self.response
        try:
            return pale_endpoint._execute(self.request)
        finally:
            # endpoint cleanup runs whether or not _execute raised
            pale_endpoint._finally()
    return type(pale_endpoint._route_name,
                (webapp2.RequestHandler,),
                {'pale_handler': pale_handler})
Generate a webapp2 . RequestHandler class for the pale endpoint .
7,985
def bind_pale_to_webapp2(pale_app_module, webapp_wsgiapplication, route_prefix=None):
    """Bind a Pale API implementation to a webapp2 WSGIApplication.

    Each endpoint extracted from *pale_app_module* is wrapped in a
    generated RequestHandler and registered on the application's router,
    optionally prefixed with *route_prefix*.
    """
    if not isinstance(webapp_wsgiapplication, webapp2.WSGIApplication):
        raise TypeError(
            "pale.adapters.webapp2.bind_pale_to_webapp2 expected "
            "the passed in webapp_wsgiapplication to be an instance of "
            "WSGIApplication, but it was an instance of %s instead."
            % (type(webapp_wsgiapplication),))
    if not pale.is_pale_module(pale_app_module):
        raise TypeError(
            "pale.adapters.webapp2.bind_pale_to_webapp2 expected "
            "the passed in pale_app_module to be a Python module with a "
            "`_module_type` value equal to `pale.ImplementationModule`, "
            "but it found an instance of %s instead."
            % (type(pale_app_module),))
    for endpoint in pale.extract_endpoints(pale_app_module):
        endpoint._set_response_class(RESPONSE_CLASS)
        handler_cls = pale_webapp2_request_handler_generator(endpoint)
        if route_prefix is not None:
            uri = "%s%s" % (route_prefix, endpoint._uri)
        else:
            uri = endpoint._uri
        # OPTIONS is always routed so the CORS preflight handler can answer
        webapp_wsgiapplication.router.add(webapp2.Route(
            uri,
            handler=handler_cls,
            name=endpoint._route_name,
            handler_method='pale_handler',
            methods=[endpoint._http_method, "OPTIONS"]))
Binds a Pale API implementation to a webapp2 WSGIApplication
7,986
def encode(self, envelope, session, target=None, modification_code=None, **kwargs):
    """Append *modification_code* at the head or tail of the given envelope.

    The returned envelope type (text/bytes) matches the input envelope.
    """
    self.__args_check(envelope, target, modification_code)
    envelope_cls = (
        WMessengerTextEnvelope if isinstance(envelope, WMessengerTextEnvelope)
        else WMessengerBytesEnvelope
    )
    payload = envelope.message()
    if target == WMessengerFixedModificationLayer.Target.head:
        combined = modification_code + payload
    else:
        combined = payload + modification_code
    return envelope_cls(combined, meta=envelope)
Method appends modification_code to the specified envelope.
7,987
def decode(self, envelope, session, target=None, modification_code=None, **kwargs):
    """Verify that *modification_code* is present in the envelope and strip it.

    :raises ValueError: if the message is too short or the code does not
        match at the expected position (head or tail).
    """
    self.__args_check(envelope, target, modification_code)
    payload = envelope.message()
    code_length = len(modification_code)
    if len(payload) < code_length:
        raise ValueError('Invalid message length')
    envelope_cls = (
        WMessengerTextEnvelope if isinstance(envelope, WMessengerTextEnvelope)
        else WMessengerBytesEnvelope
    )
    if target == WMessengerFixedModificationLayer.Target.head:
        if payload[:code_length] != modification_code:
            raise ValueError('Invalid header in message')
        stripped = payload[code_length:]
    else:
        if payload[-code_length:] != modification_code:
            raise ValueError('Invalid tail in message')
        stripped = payload[:-code_length]
    return envelope_cls(stripped, meta=envelope)
Method checks the envelope for the existence of modification_code and removes it.
7,988
def started_tasks(self, task_registry_id=None, task_cls=None):
    """Return started task(s), optionally filtered by registry id and/or class.

    With *task_registry_id* a single matching task (or None) is
    returned; otherwise a tuple of matching tasks is returned.
    """
    if task_registry_id is not None:
        # linear scan; the last matching entry wins, as in the plain loop
        match = None
        for candidate in self.__started:
            if candidate.__registry_tag__ == task_registry_id:
                match = candidate
        if task_cls is not None and match is not None and not isinstance(match, task_cls):
            return None
        return match
    present = [entry for entry in self.__started if entry is not None]
    if task_cls is not None:
        present = [entry for entry in present if isinstance(entry, task_cls)]
    return tuple(present)
Return tasks that were started. The result may be filtered by the given arguments.
7,989
def stop_task(self, task_tag, stop_dependent=True, stop_requirements=False):
    """Stop the task registered under *task_tag*.

    If the task is not started, nothing happens.  With *stop_dependent*
    the tasks that depend on it are stopped first (recursively); with
    *stop_requirements* the tasks it requires are stopped afterwards, in
    dependency-priority order.
    """
    task = self.started_tasks(task_registry_id=task_tag)
    if task is None:
        return

    def stop(task_to_stop):
        # stop and unregister a single task; only WStoppableTask has stop()
        if task_to_stop in self.__started:
            if isinstance(task_to_stop, WStoppableTask) is True:
                task_to_stop.stop()
            self.__started.remove(task_to_stop)

    def stop_dependency(task_to_stop):
        # depth-first: stop everything that depends on task_to_stop, then it
        deeper_dependencies = []
        for dependent_task in self.__started:
            if task_to_stop.__registry_tag__ in dependent_task.__class__.__dependency__:
                deeper_dependencies.append(dependent_task)
        for dependent_task in deeper_dependencies:
            stop_dependency(dependent_task)
        stop(task_to_stop)

    def calculate_requirements(task_to_stop, cross_requirements=False):
        # collect started tasks that task_to_stop requires; unless
        # cross_requirements is set, drop those that are themselves
        # required by another collected requirement
        requirements = set()
        for dependent_task in self.__started:
            if dependent_task.__class__.__registry_tag__ in task_to_stop.__class__.__dependency__:
                requirements.add(dependent_task)
        if cross_requirements is True:
            return requirements
        result = set()
        for task_a in requirements:
            requirement_match = False
            for task_b in requirements:
                if task_a.__class__.__registry_tag__ in task_b.__class__.__dependency__:
                    requirement_match = True
                    break
            if requirement_match is False:
                result.add(task_a)
        return result

    def calculate_priorities(task_to_stop, *extra_tasks, current_result=None, requirements_left=None):
        # build an ordered list of task "waves": each wave may be stopped
        # before the next, walking down the requirement graph recursively
        if current_result is None:
            current_result = []
        tasks_to_stop = [task_to_stop]
        if len(extra_tasks) > 0:
            tasks_to_stop.extend(extra_tasks)
        current_result.append(list(tasks_to_stop))
        all_requirements = calculate_requirements(tasks_to_stop[0], cross_requirements=True)
        nested_requirements = calculate_requirements(tasks_to_stop[0])
        for dependent_task in tasks_to_stop[1:]:
            nested_requirements = nested_requirements.union(calculate_requirements(dependent_task))
            all_requirements.update(calculate_requirements(dependent_task, cross_requirements=True))
        all_requirements = all_requirements.difference(nested_requirements)
        if requirements_left is not None:
            # carry over requirements deferred by the previous recursion level
            requirements_left = requirements_left.difference(all_requirements)
            nested_requirements.update(requirements_left)
        if len(nested_requirements) == 0:
            return current_result
        return calculate_priorities(
            *list(nested_requirements),
            current_result=current_result,
            requirements_left=all_requirements)

    if stop_dependent is True:
        # stop_dependency() also stops `task` itself at the end
        stop_dependency(task)
    if stop_requirements is True:
        for task_list in calculate_priorities(task):
            for single_task in task_list:
                stop(single_task)
    if stop_dependent is not True:
        stop(task)
Stop task with the given task tag . If task already stopped then nothing happens .
7,990
def start_task(cls, task_tag, skip_unresolved=False):
    """Start the registered task identified by *task_tag* via the class registry."""
    cls.registry_storage().start_task(task_tag, skip_unresolved=skip_unresolved)
Start task from registry
7,991
def stop_task(cls, task_tag, stop_dependent=True, stop_requirements=False):
    """Stop the started registry task identified by *task_tag*."""
    cls.registry_storage().stop_task(
        task_tag,
        stop_dependent=stop_dependent,
        stop_requirements=stop_requirements,
    )
Stop started task from registry
7,992
def snip_this(tag="", write_date=True):
    """Snip the notebook cell this function is invoked in.

    Delegates to snip() with start=-1, i.e. the current cell.
    """
    snip(start=-1, tag=tag, write_date=write_date)
When this function is invoked in a notebook cell, the cell is snipped.
7,993
def unsnip(tag=None, start=-1):
    """Retrieve a tagged (or untagged) snippet into the next notebook cell.

    Looks up *tag* in the tagged-inputs store, falling back to the
    untagged history, and injects the entry at index *start*.
    """
    import IPython
    shell = IPython.get_ipython()
    if tag in _tagged_inputs:
        snippets = _tagged_inputs[tag]
    else:
        snippets = _last_inputs
    if snippets:
        shell.set_next_input(snippets[start])
This function retrieves a tagged or untagged snippet .
7,994
def alert(msg, body="", icon=None):
    """Alert the user via notify-send, printing to the console as fallback.

    :param msg: notification title
    :param body: notification body, truncated to 200 characters
    :param icon: optional icon name/path passed to notify-send's -i flag
    """
    if isinstance(body, str):
        # keep the notification body short
        body = body[:200]
    if call(["which", "notify-send"]) == 0:
        cmd = ["notify-send", msg]
        if icon is not None:
            cmd += ["-i", icon]
        cmd.append(body)
        call(cmd)
    else:
        # BUGFIX: print() was handed a tuple (a Python 2 print artifact)
        # and emitted its repr, e.g. ('ALERT: ', 'msg'); print the
        # message itself instead.
        print("ALERT:", msg)
alerts the user of something happening via notify - send . If it is not installed the alert will be printed to the console .
7,995
def recgen(gen, fix_type_errors=True):
    """Recursively flatten nested iterables, yielding leaf elements.

    Non-iterable values are yielded as-is.  Strings and bytes are treated
    as atomic leaves: iterating them recurses on their single characters,
    which iterate to themselves and previously caused a RecursionError.
    If iterating an object raises TypeError, the object itself is yielded
    unless *fix_type_errors* is False, in which case the error propagates.
    """
    # BUGFIX: treat str/bytes as scalars to avoid infinite recursion on
    # single-character strings.
    if not hasattr(gen, '__iter__') or isinstance(gen, (str, bytes)):
        yield gen
    else:
        try:
            for item in gen:
                # BUGFIX: propagate fix_type_errors into the recursion
                # (it was silently reset to the default before).
                for sub in recgen(item, fix_type_errors):
                    yield sub
        except TypeError:
            if not fix_type_errors:
                raise
            yield gen
Iterates through generators recursively and flattens them .
7,996
def list_of_dicts_to_dict_of_lists(list_of_dictionaries):
    """Combine a list of dicts into a single dict of lists.

    Every key appearing in any input dict is present in the result; a
    dict missing a key contributes None at its position in that key's list.
    """
    every_key = {key for mapping in list_of_dictionaries for key in mapping}
    return {
        key: [mapping.get(key, None) for mapping in list_of_dictionaries]
        for key in every_key
    }
Takes a list of dictionaries and creates a dictionary with the combined values for each key in each dictionary. Missing values are set to None for each dictionary that does not contain a key that is present in at least one other dictionary.
7,997
def dict_of_lists_to_list_of_dicts(dictionary_of_lists):
    """Convert a dict of lists into a list of dicts.

    The i-th result dict holds every key's i-th element; shorter lists
    are padded with None.  An empty input yields an empty list
    (previously max() raised ValueError on an empty dict).
    """
    # default=0 keeps max() from raising on an empty dict
    length = max(map(len, dictionary_of_lists.values()), default=0)
    return [
        {key: values[index] if index < len(values) else None
         for key, values in dictionary_of_lists.items()}
        for index in range(length)
    ]
Takes a dictionary of lists and creates a list of dictionaries . If the lists are of unequal length the remaining entries are set to None .
7,998
def colorate(sequence, colormap="", start=0, length=None):
    """Like enumerate(), but each item also carries a colormap color.

    Yields (index, color, element) triples, with indices beginning at
    *start* and colors drawn from color_space() over the sequence.
    """
    colors = color_space(colormap, sequence, start=0.1, stop=0.9, length=length)
    for offset, elem in enumerate(sequence):
        yield start + offset, colors[offset], elem
like enumerate but with colors
7,999
def generate(self, **kwargs):
    """Create one child container per combination of the iterable keyword values.

    Iterable keyword values are expanded via their cartesian product into
    per-child parameter dicts; scalar (non-iterable) keyword values are
    stored as shared parameters on this container.  The container is
    saved afterwards.  Note that strings count as iterable, matching the
    original behavior.
    """
    # BUGFIX: collections.Iterable was removed in Python 3.10;
    # collections.abc.Iterable is the supported location.
    from collections.abc import Iterable
    iterable_kwargs = {k: v for k, v in kwargs.items() if isinstance(v, Iterable)}
    for params in cartesian_dicts(iterable_kwargs):
        if self.name_mode == 'int':
            child_name = str(len(self.containers))
        else:
            child_name = None
        self.containers.append(PDContainer(name=child_name, params=params, parent=self))
    self.parameters.update({k: v for k, v in kwargs.items() if not isinstance(v, Iterable)})
    self.save()
run once to create all children containers for each combination of the keywords