idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
3,900
def calc_stress_tf ( self , lin , lout , damped ) : tf = self . calc_strain_tf ( lin , lout ) if damped : tf *= lout . layer . comp_shear_mod else : tf *= lout . layer . shear_mod return tf
Compute the stress transfer function .
3,901
def calc_strain_tf ( self , lin , lout ) : assert lout . wave_field == WaveField . within ang_freqs = self . motion . angular_freqs cterm = 1j * self . _wave_nums [ lout . index , : ] * lout . depth_within numer = ( 1j * self . _wave_nums [ lout . index , : ] * ( self . _waves_a [ lout . index , : ] * np . exp ( cterm ) - self . _waves_b [ lout . index , : ] * np . exp ( - cterm ) ) ) denom = - ang_freqs ** 2 * self . wave_at_location ( lin ) mask = ~ np . isclose ( ang_freqs , 0 ) tf = np . zeros_like ( mask , dtype = np . complex ) tf [ mask ] = GRAVITY * numer [ mask ] / denom [ mask ] return tf
Compute the strain transfer function from lin to lout .
3,902
def _estimate_strains ( self ) : for l in self . _profile : l . reset ( ) l . strain = self . _motion . pgv / l . initial_shear_vel
Compute an estimate of the strains .
3,903
def _calc_strain ( self , loc_input , loc_layer , motion , * args ) : strain_max = self . _calc_strain_max ( loc_input , loc_layer , motion , * args ) return self . strain_ratio * strain_max
Compute the strain used for iterations of material properties .
3,904
def _calc_strain_max ( self , loc_input , loc_layer , motion , * args ) : return motion . calc_peak ( self . calc_strain_tf ( loc_input , loc_layer ) )
Compute the effective strain at the center of a layer .
3,905
def _estimate_strains ( self ) : eql = EquivalentLinearCalculator ( ) eql ( self . _motion , self . _profile , self . _loc_input )
Estimate the strains by running an EQL site response .
3,906
def timeit ( method ) : import datetime @ functools . wraps ( method ) def timed_method ( self , rinput ) : time_start = datetime . datetime . utcnow ( ) result = method ( self , rinput ) time_end = datetime . datetime . utcnow ( ) result . time_it ( time_start , time_end ) self . logger . info ( 'total time measured' ) return result return timed_method
Decorator to measure the time used by the recipe
3,907
def save_intermediate_img ( self , img , name ) : if self . intermediate_results : img . writeto ( name , overwrite = True )
Save intermediate FITS objects .
3,908
def save_intermediate_array ( self , array , name ) : if self . intermediate_results : fits . writeto ( name , array , overwrite = True )
Save intermediate array object as FITS .
3,909
def build_recipe_input ( self , ob , dal ) : result = { } ob_query_skip = False ob_query_field = 'obresult' if isinstance ( ob , ObservingBlock ) : import numina . types . obsresult as obtype for key , req in self . requirements ( ) . items ( ) : if isinstance ( req . type , obtype . ObservationResultType ) : ob_query_field = key ob_query_skip = True query_option = self . query_options . get ( key ) new_or = ObservationResult ( ) new_or . __dict__ = ob . __dict__ obsres = req . query ( dal , new_or , options = query_option ) tagger = self . mode . tagger if tagger is not None : self . logger . debug ( 'Use mode tagger to fill tags in OB' ) obsres . tags = tagger ( obsres ) else : obsres . tags = None break else : obsres = ob else : obsres = ob self . logger . debug ( 'getting query fields per REQ' ) qfields = set ( ) for key , req in self . requirements ( ) . items ( ) : tag_n = req . tag_names ( ) self . logger . debug ( "%s has these query fields %s" , key , tag_n ) qfields . update ( tag_n ) if obsres . tags is None : self . logger . debug ( 'running recipe tagger' ) self . logger . debug ( 'with query fields %s' , qfields ) if qfields : obsres . tags = self . obsres_extractor ( obsres , qfields ) else : obsres . tags = { } for key , req in self . requirements ( ) . items ( ) : try : query_option = self . query_options . get ( key ) if key == ob_query_field and ob_query_skip : result [ key ] = obsres else : result [ key ] = req . query ( dal , obsres , options = query_option ) except NoResultFound as notfound : req . on_query_not_found ( notfound ) return self . create_input ( ** result )
Build a RecipeInput object .
3,910
def subsets_of_fileinfo_from_txt ( filename ) : if not os . path . isfile ( filename ) : raise ValueError ( "File " + filename + " not found!" ) with open ( filename ) as f : file_content = f . read ( ) . splitlines ( ) dict_of_subsets_of_fileinfo = { } label = None sublist_of_fileinfo = [ ] idict = 0 ifiles = 0 nfiles = 0 sublist_finished = True for line in file_content : if len ( line ) > 0 : if line [ 0 ] != '#' : if label is None : if line [ 0 ] == "@" : nfiles = int ( line [ 1 : ] . split ( ) [ 0 ] ) label = line [ 1 : ] . split ( ) [ 1 ] sublist_of_fileinfo = [ ] ifiles = 0 sublist_finished = False else : raise ValueError ( "Expected @ symbol not found!" ) else : if line [ 0 ] == "@" : raise ValueError ( "Unexpected @ symbol found!" ) tmplist = line . split ( ) tmpfile = tmplist [ 0 ] if len ( tmplist ) > 1 : tmpinfo = tmplist [ 1 : ] else : tmpinfo = None if not os . path . isfile ( tmpfile ) : raise ValueError ( "File " + tmpfile + " not found!" ) sublist_of_fileinfo . append ( FileInfo ( tmpfile , tmpinfo ) ) ifiles += 1 if ifiles == nfiles : dict_of_subsets_of_fileinfo [ idict ] = { } tmpdict = dict_of_subsets_of_fileinfo [ idict ] tmpdict [ 'label' ] = label tmpdict [ 'list_of_fileinfo' ] = sublist_of_fileinfo idict += 1 label = None sublist_of_fileinfo = [ ] ifiles = 0 sublist_finished = True if not sublist_finished : raise ValueError ( "Unexpected end of sublist of files." ) return dict_of_subsets_of_fileinfo
Returns a dictionary with subsets of FileInfo instances from a TXT file .
3,911
def subarray_match ( shape , ref , sshape , sref = None ) : ref1 = asarray ( ref , dtype = 'int' ) if sref is not None : ref2 = asarray ( sref , dtype = 'int' ) else : ref2 = zeros_like ( ref1 ) offset = ref1 - ref2 urc1 = minimum ( offset + asarray ( sshape ) - 1 , asarray ( shape ) - 1 ) blc1 = maximum ( offset , 0 ) urc2 = urc1 - offset blc2 = blc1 - offset def valid_slice ( b , u ) : if b >= u + 1 : return None else : return slice ( b , u + 1 ) f = tuple ( valid_slice ( b , u ) for b , u in zip ( blc1 , urc1 ) ) s = tuple ( valid_slice ( b , u ) for b , u in zip ( blc2 , urc2 ) ) if not all ( f ) or not all ( s ) : return ( None , None ) return ( f , s )
Compute the slice representation of intersection of two arrays .
3,912
def rebin_scale ( a , scale = 1 ) : newshape = tuple ( ( side * scale ) for side in a . shape ) return rebin ( a , newshape )
Scale an array to a new shape .
3,913
def rebin ( a , newshape ) : slices = [ slice ( 0 , old , float ( old ) / new ) for old , new in zip ( a . shape , newshape ) ] coordinates = numpy . mgrid [ slices ] indices = coordinates . astype ( 'i' ) return a [ tuple ( indices ) ]
Rebin an array to a new shape .
3,914
def fixpix ( data , mask , kind = 'linear' ) : if data . shape != mask . shape : raise ValueError if not numpy . any ( mask ) : return data x = numpy . arange ( 0 , data . shape [ 0 ] ) for row , mrow in zip ( data , mask ) : if numpy . any ( mrow ) : valid = ( mrow == numpy . False_ ) invalid = ( mrow == numpy . True_ ) itp = interp1d ( x [ valid ] , row [ valid ] , kind = kind , copy = False ) row [ invalid ] = itp ( x [ invalid ] ) . astype ( row . dtype ) return data
Interpolate 2D array data in rows
3,915
def fixpix2 ( data , mask , iterations = 3 , out = None ) : out = out if out is not None else data . copy ( ) binry = mask != 0 lblarr , labl = ndimage . label ( binry ) stct = ndimage . generate_binary_structure ( 2 , 2 ) back = lblarr == 0 for idx in range ( 1 , labl + 1 ) : segm = lblarr == idx dilmask = numpy . logical_or ( back , segm ) more = ndimage . binary_dilation ( segm , stct , iterations = iterations , mask = dilmask ) border = numpy . logical_and ( more , numpy . logical_not ( segm ) ) xi , yi = border . nonzero ( ) calc = FitOne ( xi , yi , out [ xi , yi ] ) xi , yi = segm . nonzero ( ) out [ segm ] = calc ( xi , yi ) return out
Substitute pixels in mask by a bilinear least square fitting .
3,916
def numberarray ( x , shape ) : try : iter ( x ) except TypeError : return numpy . ones ( shape ) * x else : return x
Return x if it is an array or create an array and fill it with x .
3,917
def get_token ( request ) : if ( not request . META . get ( header_name_to_django ( auth_token_settings . HEADER_NAME ) ) and config . CHAMBER_MULTIDOMAINS_OVERTAKER_AUTH_COOKIE_NAME ) : ovetaker_auth_token = request . COOKIES . get ( config . CHAMBER_MULTIDOMAINS_OVERTAKER_AUTH_COOKIE_NAME ) token = get_object_or_none ( Token , key = ovetaker_auth_token , is_active = True ) if utils . get_user_from_token ( token ) . is_authenticated ( ) : return token return utils . get_token ( request )
Returns the token model instance associated with the given request token key . If no user is retrieved AnonymousToken is returned .
3,918
def process_request ( self , request ) : request . token = get_token ( request ) request . user = SimpleLazyObject ( lambda : get_user ( request ) ) request . _dont_enforce_csrf_checks = dont_enforce_csrf_checks ( request )
Lazy set user and token
3,919
def ximplotxy_jupyter ( x , y , fmt = None , ** args ) : using_jupyter = True if fmt is None : return ximplotxy ( x , y , using_jupyter = using_jupyter , ** args ) else : return ximplotxy ( x , y , fmt , using_jupyter = using_jupyter , ** args )
Auxiliary function to call ximplotxy from a jupyter notebook .
3,920
def atomic ( func ) : try : from reversion . revisions import create_revision return transaction . atomic ( create_revision ( ) ( func ) ) except ImportError : return transaction . atomic ( func )
Decorator helper that overrides django atomic decorator and automatically adds create revision .
3,921
def atomic_with_signals ( func ) : try : from reversion . revisions import create_revision return transaction . atomic ( create_revision ( ) ( transaction_signals ( func ) ) ) except ImportError : return transaction . atomic ( transaction_signals ( func ) )
Atomic decorator with transaction signals .
3,922
def parse_from_file ( root_processor , xml_file_path , encoding = 'utf-8' ) : with open ( xml_file_path , 'r' , encoding = encoding ) as xml_file : xml_string = xml_file . read ( ) parsed_value = parse_from_string ( root_processor , xml_string ) return parsed_value
Parse the XML file using the processor starting from the root of the document .
3,923
def parse_from_string ( root_processor , xml_string ) : if not _is_valid_root_processor ( root_processor ) : raise InvalidRootProcessor ( 'Invalid root processor' ) parseable_xml_string = xml_string if _PY2 and isinstance ( xml_string , Text ) : parseable_xml_string = xml_string . encode ( 'utf-8' ) root = ET . fromstring ( parseable_xml_string ) _xml_namespace_strip ( root ) state = _ProcessorState ( ) state . push_location ( root_processor . element_path ) return root_processor . parse_at_root ( root , state )
Parse the XML string using the processor starting from the root of the document .
3,924
def serialize_to_file ( root_processor , value , xml_file_path , encoding = 'utf-8' , indent = None ) : serialized_value = serialize_to_string ( root_processor , value , indent ) with open ( xml_file_path , 'w' , encoding = encoding ) as xml_file : xml_file . write ( serialized_value )
Serialize the value to an XML file using the root processor .
3,925
def serialize_to_string ( root_processor , value , indent = None ) : if not _is_valid_root_processor ( root_processor ) : raise InvalidRootProcessor ( 'Invalid root processor' ) state = _ProcessorState ( ) state . push_location ( root_processor . element_path ) root = root_processor . serialize ( value , state ) state . pop_location ( ) serialized_value = ET . tostring ( root , encoding = 'utf-8' ) if indent : serialized_value = minidom . parseString ( serialized_value ) . toprettyxml ( indent = indent , encoding = 'utf-8' ) return serialized_value . decode ( 'utf-8' )
Serialize the value to an XML string using the root processor .
3,926
def array ( item_processor , alias = None , nested = None , omit_empty = False , hooks = None ) : processor = _Array ( item_processor , alias , nested , omit_empty ) return _processor_wrap_if_hooks ( processor , hooks )
Create an array processor that can be used to parse and serialize array data .
3,927
def boolean ( element_name , attribute = None , required = True , alias = None , default = False , omit_empty = False , hooks = None ) : return _PrimitiveValue ( element_name , _parse_boolean , attribute , required , alias , default , omit_empty , hooks )
Create a processor for boolean values .
3,928
def dictionary ( element_name , children , required = True , alias = None , hooks = None ) : processor = _Dictionary ( element_name , children , required , alias ) return _processor_wrap_if_hooks ( processor , hooks )
Create a processor for dictionary values .
3,929
def floating_point ( element_name , attribute = None , required = True , alias = None , default = 0.0 , omit_empty = False , hooks = None ) : value_parser = _number_parser ( float ) return _PrimitiveValue ( element_name , value_parser , attribute , required , alias , default , omit_empty , hooks )
Create a processor for floating point values .
3,930
def integer ( element_name , attribute = None , required = True , alias = None , default = 0 , omit_empty = False , hooks = None ) : value_parser = _number_parser ( int ) return _PrimitiveValue ( element_name , value_parser , attribute , required , alias , default , omit_empty , hooks )
Create a processor for integer values .
3,931
def named_tuple ( element_name , tuple_type , child_processors , required = True , alias = None , hooks = None ) : converter = _named_tuple_converter ( tuple_type ) processor = _Aggregate ( element_name , converter , child_processors , required , alias ) return _processor_wrap_if_hooks ( processor , hooks )
Create a processor for namedtuple values .
3,932
def string ( element_name , attribute = None , required = True , alias = None , default = '' , omit_empty = False , strip_whitespace = True , hooks = None ) : value_parser = _string_parser ( strip_whitespace ) return _PrimitiveValue ( element_name , value_parser , attribute , required , alias , default , omit_empty , hooks )
Create a processor for string values .
3,933
def user_object ( element_name , cls , child_processors , required = True , alias = None , hooks = None ) : converter = _user_object_converter ( cls ) processor = _Aggregate ( element_name , converter , child_processors , required , alias ) return _processor_wrap_if_hooks ( processor , hooks )
Create a processor for user objects .
3,934
def _element_append_path ( start_element , element_names ) : end_element = start_element for element_name in element_names : new_element = ET . Element ( element_name ) end_element . append ( new_element ) end_element = new_element return end_element
Append the list of element names as a path to the provided start element .
3,935
def _element_find_from_root ( root , element_path ) : element = None element_names = element_path . split ( '/' ) if element_names [ 0 ] == root . tag : if len ( element_names ) > 1 : element = root . find ( '/' . join ( element_names [ 1 : ] ) ) else : element = root return element
Find the element specified by the given path starting from the root element of the document .
3,936
def _element_get_or_add_from_parent ( parent , element_path ) : element_names = element_path . split ( '/' ) existing_element = None previous_element = parent for i , element_name in enumerate ( element_names ) : existing_element = previous_element . find ( element_name ) if existing_element is None : existing_element = _element_append_path ( previous_element , element_names [ i : ] ) break previous_element = existing_element assert existing_element is not None return existing_element
Ensure all elements specified in the given path relative to the provided parent element exist .
3,937
def _element_path_create_new ( element_path ) : element_names = element_path . split ( '/' ) start_element = ET . Element ( element_names [ 0 ] ) end_element = _element_append_path ( start_element , element_names [ 1 : ] ) return start_element , end_element
Create an entirely new element path .
3,938
def _hooks_apply_after_parse ( hooks , state , value ) : if hooks and hooks . after_parse : return hooks . after_parse ( ProcessorStateView ( state ) , value ) return value
Apply the after parse hook .
3,939
def _hooks_apply_before_serialize ( hooks , state , value ) : if hooks and hooks . before_serialize : return hooks . before_serialize ( ProcessorStateView ( state ) , value ) return value
Apply the before serialize hook .
3,940
def _named_tuple_converter ( tuple_type ) : def _from_dict ( dict_value ) : if dict_value : return tuple_type ( ** dict_value ) return None def _to_dict ( value ) : if value : return value . _asdict ( ) return { } converter = _AggregateConverter ( from_dict = _from_dict , to_dict = _to_dict ) return converter
Return an _AggregateConverter for named tuples of the given type .
3,941
def _number_parser ( str_to_number_func ) : def _parse_number_value ( element_text , state ) : value = None try : value = str_to_number_func ( element_text ) except ( ValueError , TypeError ) : state . raise_error ( InvalidPrimitiveValue , 'Invalid numeric value "{}"' . format ( element_text ) ) return value return _parse_number_value
Return a function to parse numbers .
3,942
def _parse_boolean ( element_text , state ) : value = None lowered_text = element_text . lower ( ) if lowered_text == 'true' : value = True elif lowered_text == 'false' : value = False else : state . raise_error ( InvalidPrimitiveValue , 'Invalid boolean value "{}"' . format ( element_text ) ) return value
Parse the raw XML string as a boolean value .
3,943
def _string_parser ( strip_whitespace ) : def _parse_string_value ( element_text , _state ) : if element_text is None : value = '' elif strip_whitespace : value = element_text . strip ( ) else : value = element_text return value return _parse_string_value
Return a parser function for parsing string values .
3,944
def _user_object_converter ( cls ) : def _from_dict ( dict_value ) : try : object_value = cls ( ** dict_value ) except TypeError : object_value = cls ( ) for field_name , field_value in dict_value . items ( ) : setattr ( object_value , field_name , field_value ) return object_value def _to_dict ( value ) : if value : return value . __dict__ return { } return _AggregateConverter ( from_dict = _from_dict , to_dict = _to_dict )
Return an _AggregateConverter for a user object of the given class .
3,945
def _xml_namespace_strip ( root ) : if '}' not in root . tag : return for element in root . iter ( ) : if '}' in element . tag : element . tag = element . tag . split ( '}' ) [ 1 ] else : pass
Strip the XML namespace prefix from all element tags under the given root Element .
3,946
def parse_at_element ( self , element , state ) : parsed_dict = self . _dictionary . parse_at_element ( element , state ) return self . _converter . from_dict ( parsed_dict )
Parse the provided element as an aggregate .
3,947
def parse_at_root ( self , root , state ) : parsed_dict = self . _dictionary . parse_at_root ( root , state ) return self . _converter . from_dict ( parsed_dict )
Parse the root XML element as an aggregate .
3,948
def parse_from_parent ( self , parent , state ) : parsed_dict = self . _dictionary . parse_from_parent ( parent , state ) return self . _converter . from_dict ( parsed_dict )
Parse the aggregate from the provided parent XML element .
3,949
def serialize ( self , value , state ) : dict_value = self . _converter . to_dict ( value ) return self . _dictionary . serialize ( dict_value , state )
Serialize the value to a new element and return the element .
3,950
def serialize_on_parent ( self , parent , value , state ) : dict_value = self . _converter . to_dict ( value ) self . _dictionary . serialize_on_parent ( parent , dict_value , state )
Serialize the value and add it to the parent .
3,951
def parse_at_element ( self , element , state ) : item_iter = element . findall ( self . _item_processor . element_path ) return self . _parse ( item_iter , state )
Parse the provided element as an array .
3,952
def parse_at_root ( self , root , state ) : if not self . _nested : raise InvalidRootProcessor ( 'Non-nested array "{}" cannot be root element' . format ( self . alias ) ) parsed_array = [ ] array_element = _element_find_from_root ( root , self . _nested ) if array_element is not None : parsed_array = self . parse_at_element ( array_element , state ) elif self . required : raise MissingValue ( 'Missing required array at root: "{}"' . format ( self . _nested ) ) return parsed_array
Parse the root XML element as an array .
3,953
def parse_from_parent ( self , parent , state ) : item_iter = parent . findall ( self . _item_path ) return self . _parse ( item_iter , state )
Parse the array data from the provided parent XML element .
3,954
def serialize ( self , value , state ) : if self . _nested is None : state . raise_error ( InvalidRootProcessor , 'Cannot directly serialize a non-nested array "{}"' . format ( self . alias ) ) if not value and self . required : state . raise_error ( MissingValue , 'Missing required array: "{}"' . format ( self . alias ) ) start_element , end_element = _element_path_create_new ( self . _nested ) self . _serialize ( end_element , value , state ) return start_element
Serialize the value into a new Element object and return it .
3,955
def serialize_on_parent ( self , parent , value , state ) : if not value and self . required : state . raise_error ( MissingValue , 'Missing required array: "{}"' . format ( self . alias ) ) if not value and self . omit_empty : return if self . _nested is not None : array_parent = _element_get_or_add_from_parent ( parent , self . _nested ) else : array_parent = parent self . _serialize ( array_parent , value , state )
Serialize the value and append it to the parent element .
3,956
def _parse ( self , item_iter , state ) : parsed_array = [ ] for i , item in enumerate ( item_iter ) : state . push_location ( self . _item_processor . element_path , i ) parsed_array . append ( self . _item_processor . parse_at_element ( item , state ) ) state . pop_location ( ) if not parsed_array and self . required : state . raise_error ( MissingValue , 'Missing required array "{}"' . format ( self . alias ) ) return parsed_array
Parse the array data using the provided iterator of XML elements .
3,957
def _serialize ( self , array_parent , value , state ) : if not value : return for i , item_value in enumerate ( value ) : state . push_location ( self . _item_processor . element_path , i ) item_element = self . _item_processor . serialize ( item_value , state ) array_parent . append ( item_element ) state . pop_location ( )
Serialize the array value and add it to the array parent element .
3,958
def parse_at_element ( self , element , state ) : parsed_dict = { } for child in self . _child_processors : state . push_location ( child . element_path ) parsed_dict [ child . alias ] = child . parse_from_parent ( element , state ) state . pop_location ( ) return parsed_dict
Parse the provided element as a dictionary .
3,959
def parse_at_root ( self , root , state ) : parsed_dict = { } dict_element = _element_find_from_root ( root , self . element_path ) if dict_element is not None : parsed_dict = self . parse_at_element ( dict_element , state ) elif self . required : raise MissingValue ( 'Missing required root aggregate "{}"' . format ( self . element_path ) ) return parsed_dict
Parse the root XML element as a dictionary .
3,960
def serialize ( self , value , state ) : if not value and self . required : state . raise_error ( MissingValue , 'Missing required aggregate "{}"' . format ( self . element_path ) ) start_element , end_element = _element_path_create_new ( self . element_path ) self . _serialize ( end_element , value , state ) return start_element
Serialize the value to a new element and return the element .
3,961
def _serialize ( self , element , value , state ) : for child in self . _child_processors : state . push_location ( child . element_path ) child_value = value . get ( child . alias ) child . serialize_on_parent ( element , child_value , state ) state . pop_location ( )
Serialize the dictionary and append all serialized children to the element .
3,962
def parse_at_element ( self , element , state ) : xml_value = self . _processor . parse_at_element ( element , state ) return _hooks_apply_after_parse ( self . _hooks , state , xml_value )
Parse the given element .
3,963
def parse_at_root ( self , root , state ) : xml_value = self . _processor . parse_at_root ( root , state ) return _hooks_apply_after_parse ( self . _hooks , state , xml_value )
Parse the given element as the root of the document .
3,964
def parse_from_parent ( self , parent , state ) : xml_value = self . _processor . parse_from_parent ( parent , state ) return _hooks_apply_after_parse ( self . _hooks , state , xml_value )
Parse the element from the given parent element .
3,965
def serialize ( self , value , state ) : xml_value = _hooks_apply_before_serialize ( self . _hooks , state , value ) return self . _processor . serialize ( xml_value , state )
Serialize the value and return it .
3,966
def serialize_on_parent ( self , parent , value , state ) : xml_value = _hooks_apply_before_serialize ( self . _hooks , state , value ) self . _processor . serialize_on_parent ( parent , xml_value , state )
Serialize the value directly on the parent .
3,967
def parse_at_element ( self , element , state ) : if self . _attribute : parsed_value = self . _parse_attribute ( element , self . _attribute , state ) else : parsed_value = self . _parser_func ( element . text , state ) return _hooks_apply_after_parse ( self . _hooks , state , parsed_value )
Parse the primitive value at the XML element .
3,968
def parse_from_parent ( self , parent , state ) : element = parent . find ( self . element_path ) if element is None and self . required : state . raise_error ( MissingValue , 'Missing required element "{}"' . format ( self . element_path ) ) elif element is not None : return self . parse_at_element ( element , state ) return _hooks_apply_after_parse ( self . _hooks , state , self . _default )
Parse the primitive value under the parent XML element .
3,969
def serialize ( self , value , state ) : start_element , end_element = _element_path_create_new ( self . element_path ) self . _serialize ( end_element , value , state ) return start_element
Serialize the value into a new element object and return the element .
3,970
def serialize_on_parent ( self , parent , value , state ) : if value is None and self . required : state . raise_error ( MissingValue , self . _missing_value_message ( parent ) ) if not value and self . omit_empty : return element = _element_get_or_add_from_parent ( parent , self . element_path ) self . _serialize ( element , value , state )
Serialize the value and add it to the parent element .
3,971
def _missing_value_message ( self , parent ) : if self . _attribute is None : message = 'Missing required value for element "{}"' . format ( self . element_path ) else : if self . element_path == '.' : parent_name = parent . tag else : parent_name = self . element_path message = 'Missing required value for attribute "{}" on element "{}"' . format ( self . _attribute , parent_name ) return message
Return the message to report that the value needed for serialization is missing .
3,972
def _parse_attribute ( self , element , attribute , state ) : parsed_value = self . _default attribute_value = element . get ( attribute , None ) if attribute_value is not None : parsed_value = self . _parser_func ( attribute_value , state ) elif self . required : state . raise_error ( MissingValue , 'Missing required attribute "{}" on element "{}"' . format ( self . _attribute , element . tag ) ) return parsed_value
Parse the primitive value within the XML element's attribute .
3,973
def _serialize ( self , element , value , state ) : xml_value = _hooks_apply_before_serialize ( self . _hooks , state , value ) if xml_value is None : if self . _default is None : serialized_value = Text ( '' ) else : serialized_value = Text ( self . _default ) else : serialized_value = Text ( xml_value ) if self . _attribute : element . set ( self . _attribute , serialized_value ) else : element . text = serialized_value
Serialize the value to the element .
3,974
def push_location ( self , element_path , array_index = None ) : location = ProcessorLocation ( element_path = element_path , array_index = array_index ) self . _locations . append ( location )
Push an item onto the state's stack of locations .
3,975
def raise_error ( self , exception_type , message ) : error_message = '{} at {}' . format ( message , repr ( self ) ) raise exception_type ( error_message )
Raise an exception with the current parser state information and error message .
3,976
def export_partlist_to_file ( input , output , timeout = 20 , showgui = False ) : input = norm_path ( input ) output = norm_path ( output ) commands = export_command ( output = output , output_type = 'partlist' ) command_eagle ( input = input , timeout = timeout , commands = commands , showgui = showgui )
call eagle and export sch or brd to partlist text file
3,977
def parse_partlist ( str ) : lines = str . strip ( ) . splitlines ( ) lines = filter ( len , lines ) hind = header_index ( lines ) if hind is None : log . debug ( 'empty partlist found' ) return ( [ ] , [ ] ) header_line = lines [ hind ] header = header_line . split ( ' ' ) header = filter ( len , header ) positions = [ header_line . index ( x ) for x in header ] header = [ x . strip ( ) . split ( ) [ 0 ] . lower ( ) for x in header ] data_lines = lines [ hind + 1 : ] def parse_data_line ( line ) : y = [ ( h , line [ pos1 : pos2 ] . strip ( ) ) for h , pos1 , pos2 in zip ( header , positions , positions [ 1 : ] + [ 1000 ] ) ] return dict ( y ) data = [ parse_data_line ( x ) for x in data_lines ] return ( header , data )
parse partlist text delivered by eagle .
3,978
def raw_partlist ( input , timeout = 20 , showgui = False ) : output = tempfile . NamedTemporaryFile ( prefix = 'eagexp_' , suffix = '.partlist' , delete = 0 ) . name export_partlist_to_file ( input = input , output = output , timeout = timeout , showgui = showgui ) s = Path ( output ) . text ( encoding = 'latin1' ) os . remove ( output ) return s
export partlist by eagle then return it
3,979
def structured_partlist ( input , timeout = 20 , showgui = False ) : s = raw_partlist ( input = input , timeout = timeout , showgui = showgui ) return parse_partlist ( s )
export partlist by eagle then parse it
3,980
def print_partlist ( input , timeout = 20 , showgui = False ) : print raw_partlist ( input = input , timeout = timeout , showgui = showgui )
print partlist text delivered by eagle
3,981
def bitset ( bs , member_label = None , filename = None , directory = None , format = None , render = False , view = False ) : if member_label is None : member_label = MEMBER_LABEL if filename is None : kind = 'members' if member_label else 'bits' filename = FILENAME % ( bs . __name__ , kind ) dot = graphviz . Digraph ( name = bs . __name__ , comment = repr ( bs ) , filename = filename , directory = directory , format = format , edge_attr = { 'dir' : 'none' } ) node_name = NAME_GETTERS [ 0 ] if callable ( member_label ) : node_label = member_label else : node_label = LABEL_GETTERS [ member_label ] for i in range ( bs . supremum + 1 ) : b = bs . fromint ( i ) name = node_name ( b ) dot . node ( name , node_label ( b ) ) dot . edges ( ( name , node_name ( b & ~ a ) ) for a in b . atoms ( reverse = True ) ) if render or view : dot . render ( view = view ) return dot
Graphviz source for the Hasse diagram of the domain's Boolean algebra .
3,982
def _get_field_method ( self , tp ) : method = self . field_constructor . get ( tp ) if method and hasattr ( self , method . __name__ ) : return getattr ( self , method . __name__ ) return method
Returns a reference to the form element's constructor method.
3,983
def _create_plain_field ( self , attr , options ) : method = self . _get_field_method ( attr . py_type ) or self . _create_other_field klass , options = method ( attr , options ) if attr . is_unique : options [ 'validators' ] . append ( validators . UniqueEntityValidator ( attr . entity ) ) return klass , options
Creates the form element .
3,984
def _create_relational_field(self, attr, options):
    """Create the form element for working with entity relationships.

    :return: ``(EntityField, options)`` where options carry the target
        entity class and whether an empty selection is allowed
    """
    options.update(
        entity_class=attr.py_type,
        allow_empty=not attr.is_required,
    )
    return EntityField, options
Creates the form element for working with entity relationships .
3,985
def add(self, attr, field_class=None, **options):
    """Add an element to the form based on the entity attribute.

    :param attr: entity attribute describing the field
    :param field_class: optional field class overriding the one chosen
        by the type-specific constructor
    :param options: extra keyword options merged over the defaults
    :return: ``self`` (chainable)
    """
    def register(klass, opts):
        # A falsy field class means the attribute should be skipped.
        if klass:
            factory = field_class if field_class else klass
            self._fields[attr.name] = factory(**opts)
        return self

    kwargs = {
        'label': attr.name,
        'default': attr.default,
        'validators': [],
    }
    kwargs.update(options)
    if attr.is_pk:
        return register(*self._create_pk_field(attr, kwargs))
    if attr.is_collection:
        return register(*self._create_collection_field(attr, kwargs))
    if attr.is_required and not attr.is_pk:
        presence = wtf_validators.InputRequired()
    else:
        presence = wtf_validators.Optional()
    kwargs['validators'].insert(0, presence)
    if attr.is_relation:
        return register(*self._create_relational_field(attr, kwargs))
    return register(*self._create_plain_field(attr, kwargs))
Adds an element to the form based on the entity attribute .
3,986
def add_button(self, name, button_class=wtf_fields.SubmitField, **options):
    """Add a button to the form, registered under *name*."""
    button = button_class(**options)
    self._buttons[name] = button
Adds a button to the form .
3,987
def field_uuid(self, attr, options):
    """Create a form element for the UUID type.

    :return: ``(StringField, options)`` with a UUID validator appended
    """
    uuid_check = validators.UUIDValidator(attr.entity)
    options['validators'].append(uuid_check)
    return wtf_fields.StringField, options
Creates a form element for the UUID type .
3,988
def runm():
    """Spawn and supervise multiple worker processes.

    This is super minimal and pretty hacky, but it counts as a first pass.
    Reads the worker count from ``sys.argv[1]`` (consuming it), starts
    that many ``run`` workers, and always waits for them to finish.
    """
    signal.signal(signal.SIGINT, signal_handler)
    count = int(sys.argv.pop(1))
    processes = [Process(target=run, args=()) for _ in range(count)]
    try:
        for p in processes:
            p.start()
    except KeyboardInterrupt:
        # Fix: the original caught KeyError, which Process.start() never
        # raises (dead code).  Given the SIGINT handler installed above,
        # KeyboardInterrupt is the interruption actually expected here.
        pass
    finally:
        for p in processes:
            p.join()
This is super minimal and pretty hacky but it counts as a first pass .
3,989
def identify(self, request):
    """Establish what identity this user claims to have from request.

    Returns ``NO_IDENTITY`` when no token is present, the token cannot
    be decoded, it has expired, or it carries no userid; otherwise an
    ``Identity`` built from the userid plus any extra claims.
    """
    token = self.get_jwt(request)
    if token is None:
        return NO_IDENTITY
    try:
        claims_set = self.decode_jwt(token)
    except (DecodeError, ExpiredSignatureError):
        return NO_IDENTITY
    userid = self.get_userid(claims_set)
    if userid is None:
        return NO_IDENTITY
    extra_claims = self.get_extra_claims(claims_set)
    if extra_claims is None:
        return Identity(userid=userid)
    return Identity(userid=userid, **extra_claims)
Establish what identity this user claims to have from request .
3,990
def remember(self, response, request, identity):
    """Remember identity on response.

    Encodes the identity's claims as a JWT and stores it in the
    response's ``Authorization`` header, prefixed by the configured
    auth scheme.
    """
    claims = identity.as_dict()
    userid = claims.pop('userid')
    claims_set = self.create_claims_set(request, userid, claims)
    token = self.encode_jwt(claims_set)
    header_value = '%s %s' % (self.auth_header_prefix, token)
    response.headers['Authorization'] = header_value
Remember identity on response .
3,991
def decode_jwt(self, token, verify_expiration=True):
    """Decode a JWT token into its claims set.

    :param token: encoded JWT string
    :param verify_expiration: whether the ``exp`` claim is enforced
    :return: the decoded claims dictionary
    """
    return jwt.decode(
        token,
        self.public_key,
        algorithms=[self.algorithm],
        options={'verify_exp': verify_expiration},
        leeway=self.leeway,
        issuer=self.issuer,
    )
Decode a JWTAuth token into its claims set .
3,992
def create_claims_set(self, request, userid, extra_claims=None):
    """Create the claims set for *userid* based on the settings and the
    optional *extra_claims* dictionary.

    Adds ``exp``, ``iss`` and (when refresh is enabled) the
    ``refresh_until``/``nonce`` claims according to configuration.
    """
    claims = {self.userid_claim: userid}
    now = timegm(datetime.utcnow().utctimetuple())
    if self.expiration_delta is not None:
        claims['exp'] = now + self.expiration_delta
    if self.issuer is not None:
        claims['iss'] = self.issuer
    if self.allow_refresh:
        if self.refresh_delta is not None:
            claims['refresh_until'] = now + self.refresh_delta
        if self.refresh_nonce_handler is not None:
            claims['nonce'] = self.refresh_nonce_handler(request, userid)
    if extra_claims is not None:
        claims.update(extra_claims)
    return claims
Create the claims set based on the userid of the claimed identity the settings and the extra_claims dictionary .
3,993
def encode_jwt(self, claims_set):
    """Encode a JWT token based on the claims_set and the settings.

    :return: the encoded token as text
    """
    token = jwt.encode(claims_set, self.private_key, self.algorithm)
    # PyJWT may return bytes; normalise to text on Python 3.
    if PY3:
        token = token.decode(encoding='UTF-8')
    return token
Encode a JWT token based on the claims_set and the settings .
3,994
def get_extra_claims(self, claims_set):
    """Get claims holding extra identity info from the claims set.

    Filters out the userid claim and the registered/reserved JWT claims;
    returns ``None`` when nothing is left.
    """
    reserved_claims = (self.userid_claim, "iss", "aud", "exp", "nbf",
                       "iat", "jti", "refresh_until", "nonce")
    extra_claims = {claim: value for claim, value in claims_set.items()
                    if claim not in reserved_claims}
    return extra_claims or None
Get claims holding extra identity info from the claims set .
3,995
def get_jwt(self, request):
    """Extract the JWT token from the authorisation header of the request.

    Returns ``None`` when the header is absent, malformed, or carries a
    different auth scheme than the configured prefix.
    """
    try:
        credentials = request.authorization
    except ValueError:
        # Malformed Authorization header.
        return None
    if credentials is None:
        return None
    scheme, token = credentials
    if scheme.lower() != self.auth_header_prefix.lower():
        return None
    return token
Extract the JWT token from the authorisation header of the request .
3,996
def verify_refresh(self, request):
    """Verify if the request to refresh the token is valid.

    If valid, it returns the userid, which can be used to create an
    updated identity with ``remember_identity``.  Otherwise it raises
    an exception based on ``InvalidTokenError``.

    Checks, in order: refresh enabled, token present and decodable,
    userid claim present, nonce valid (if a handler is configured), and
    refresh window not expired (if configured).
    """
    if not self.allow_refresh:
        raise InvalidTokenError('Token refresh is disabled')
    token = self.get_jwt(request)
    if token is None:
        raise InvalidTokenError('Token not found')
    # Expiry of the *access* token is only enforced here when configured;
    # a refresh may be allowed on an already-expired token.
    try:
        claims_set = self.decode_jwt(token, self.verify_expiration_on_refresh)
    except DecodeError:
        raise DecodeError('Token could not be decoded')
    except ExpiredSignatureError:
        raise ExpiredSignatureError('Token has expired')
    userid = self.get_userid(claims_set)
    if userid is None:
        raise MissingRequiredClaimError(self.userid_claim)
    if self.refresh_nonce_handler is not None:
        # The nonce lets the application invalidate outstanding refresh
        # tokens (e.g. after logout): the handler's current value must
        # match the one embedded in the token.
        if 'nonce' not in claims_set:
            raise MissingRequiredClaimError('nonce')
        if self.refresh_nonce_handler(request, userid) != claims_set['nonce']:
            raise InvalidTokenError('Refresh nonce is not valid')
    if self.refresh_delta is not None:
        if 'refresh_until' not in claims_set:
            raise MissingRequiredClaimError('refresh_until')
        # Compare against the current UTC time, allowing configured leeway.
        now = timegm(datetime.utcnow().utctimetuple())
        refresh_until = int(claims_set['refresh_until'])
        if refresh_until < (now - self.leeway):
            raise ExpiredSignatureError('Refresh nonce has expired')
    return userid
Verify if the request to refresh the token is valid . If valid it returns the userid which can be used to create an updated identity with remember_identity . Otherwise it raises an exception based on InvalidTokenError .
3,997
def fit_theil_sen(x, y):
    """Compute a robust linear fit using the Theil-Sen method.

    Pairs each point in the first half of the data with one in the second
    half, takes the median of the pairwise slopes, then the median of the
    per-point intercepts.

    :param x: 1-D sequence of abscissae (at least 5 points)
    :param y: 1-D sequence matching *x*, or a 2-D array whose first axis
        matches *x* (one fit per column)
    :return: squeezed array ``[intercept, slope]`` (one column per series)
    :raises ValueError: for too few points or mismatched shapes
    """
    xarr = numpy.asarray(x)
    yraw = numpy.asarray(y)
    npts = len(xarr)
    if npts < 5:
        raise ValueError('Number of points < 5')
    if xarr.ndim != 1:
        raise ValueError('Input arrays have unexpected dimensions')
    if yraw.ndim == 1:
        if len(yraw) != npts:
            raise ValueError('X and Y arrays have different sizes')
        yarr = yraw[numpy.newaxis, :]
    elif yraw.ndim == 2:
        if npts != yraw.shape[0]:
            raise ValueError('Y-array size in the fitting direction is different to the X-array size')
        yarr = yraw.T
    else:
        raise ValueError('Input arrays have unexpected dimensions')
    half = npts // 2
    # For odd npts, skip the middle point when pairing halves.
    upper = half if (npts % 2) == 0 else half + 1
    dx = xarr[upper:] - xarr[:half]
    dy = yarr[:, upper:] - yarr[:, :half]
    slopes = numpy.median(dy / dx, axis=1)
    inters = numpy.median(yarr - slopes[:, numpy.newaxis] * x, axis=1)
    return numpy.squeeze(numpy.array([inters, slopes]))
Compute a robust linear fit using the Theil - Sen method .
3,998
def process_unknown_arguments(unknowns):
    """Process arguments unknown to the parser.

    Collects ``--parameter-KEY=VALUE`` arguments into the
    ``extra_control`` dictionary of the returned namespace; anything
    else is ignored, as are entries with an empty KEY.

    Fix: split on the first ``=`` only (via ``partition``), so values
    containing ``=`` are kept intact instead of being silently dropped.

    :param unknowns: iterable of raw argument strings
    :return: ``argparse.Namespace`` with an ``extra_control`` dict
    """
    result = argparse.Namespace()
    result.extra_control = {}
    prefix = '--parameter-'
    for unknown in unknowns:
        if not unknown.startswith(prefix):
            continue
        key, sep, val = unknown[len(prefix):].partition('=')
        if sep and key:
            result.extra_control[key] = val
    return result
Process arguments unknown to the parser
3,999
def get_fd(file_or_fd, default=None):
    """Helper function for getting a file descriptor.

    Falls back to *default* when *file_or_fd* is ``None``; if the chosen
    value has a ``fileno()`` method, its result is returned, otherwise
    the value itself is returned unchanged.
    """
    fd = default if file_or_fd is None else file_or_fd
    if hasattr(fd, 'fileno'):
        fd = fd.fileno()
    return fd
Helper function for getting a file descriptor .