idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
47,500 | def _assign_numbers ( self ) : first = self . select_related ( 'point_of_sales' , 'receipt_type' ) . first ( ) next_num = Receipt . objects . fetch_last_receipt_number ( first . point_of_sales , first . receipt_type , ) + 1 for receipt in self . filter ( receipt_number__isnull = True ) : Receipt . objects . filter ( pk = receipt . id , receipt_number__isnull = True , ) . update ( receipt_number = next_num , ) next_num += 1 | Assign numbers in preparation for validating these receipts . |
47,501 | def check_groupable ( self ) : types = self . aggregate ( poses = Count ( 'point_of_sales_id' , distinct = True ) , types = Count ( 'receipt_type' , distinct = True ) , ) if set ( types . values ( ) ) > { 1 } : raise exceptions . CannotValidateTogether ( ) return self | Checks that all receipts returned by this queryset are groupable . |
47,502 | def validate ( self , ticket = None ) : qs = self . filter ( validation__isnull = True ) . check_groupable ( ) if qs . count ( ) == 0 : return [ ] qs . order_by ( 'issued_date' , 'id' ) . _assign_numbers ( ) return qs . _validate ( ticket ) | Validates all receipts matching this queryset . |
47,503 | def fetch_last_receipt_number ( self , point_of_sales , receipt_type ) : client = clients . get_client ( 'wsfe' , point_of_sales . owner . is_sandboxed ) response_xml = client . service . FECompUltimoAutorizado ( serializers . serialize_ticket ( point_of_sales . owner . get_or_create_ticket ( 'wsfe' ) ) , point_of_sales . number , receipt_type . code , ) check_response ( response_xml ) return response_xml . CbteNro | Returns the number for the last validated receipt . |
47,504 | def total_vat ( self ) : q = Vat . objects . filter ( receipt = self ) . aggregate ( total = Sum ( 'amount' ) ) return q [ 'total' ] or 0 | Returns the sum of all Vat objects . |
47,505 | def total_tax ( self ) : q = Tax . objects . filter ( receipt = self ) . aggregate ( total = Sum ( 'amount' ) ) return q [ 'total' ] or 0 | Returns the sum of all Tax objects . |
47,506 | def is_validated ( self ) : if not self . receipt_number : return False try : return self . validation . result == ReceiptValidation . RESULT_APPROVED except ReceiptValidation . DoesNotExist : return False | Returns True if this instance is validated . |
47,507 | def validate ( self , ticket = None , raise_ = False ) : rv = Receipt . objects . filter ( pk = self . pk ) . validate ( ticket ) self . refresh_from_db ( ) if raise_ and rv : raise exceptions . ValidationError ( rv [ 0 ] ) return rv | Validates this receipt . |
47,508 | def create_for_receipt ( self , receipt , ** kwargs ) : try : profile = TaxPayerProfile . objects . get ( taxpayer__points_of_sales__receipts = receipt , ) except TaxPayerProfile . DoesNotExist : raise exceptions . DjangoAfipException ( 'Cannot generate a PDF for taxpayer with no profile' , ) pdf = ReceiptPDF . objects . create ( receipt = receipt , issuing_name = profile . issuing_name , issuing_address = profile . issuing_address , issuing_email = profile . issuing_email , vat_condition = profile . vat_condition , gross_income_condition = profile . gross_income_condition , sales_terms = profile . sales_terms , ** kwargs ) return pdf | Creates a ReceiptPDF object for a given receipt . Does not actually generate the related PDF file . |
47,509 | def save_pdf ( self , save_model = True ) : from django_afip . views import ReceiptPDFView if not self . receipt . is_validated : raise exceptions . DjangoAfipException ( _ ( 'Cannot generate pdf for non-authorized receipt' ) ) self . pdf_file = File ( BytesIO ( ) , name = '{}.pdf' . format ( uuid . uuid4 ( ) . hex ) ) render_pdf ( template = 'receipts/code_{}.html' . format ( self . receipt . receipt_type . code , ) , file_ = self . pdf_file , context = ReceiptPDFView . get_context_for_pk ( self . receipt_id ) , ) if save_model : self . save ( ) | Save the receipt as a PDF related to this model . |
47,510 | def compute_amount ( self ) : self . amount = self . base_amount * self . aliquot / 100 return self . amount | Auto - assign and return the total amount for this tax . |
47,511 | def get_label_based_random_walk_matrix ( adjacency_matrix , labelled_nodes , label_absorption_probability ) : rw_transition = sparse . csr_matrix ( adjacency_matrix , dtype = np . float64 ) out_degree = rw_transition . sum ( axis = 1 ) in_degree = rw_transition . sum ( axis = 0 ) for i in np . arange ( rw_transition . shape [ 0 ] ) : rw_transition . data [ rw_transition . indptr [ i ] : rw_transition . indptr [ i + 1 ] ] = rw_transition . data [ rw_transition . indptr [ i ] : rw_transition . indptr [ i + 1 ] ] / out_degree [ i ] out_degree = np . array ( out_degree ) . astype ( np . float64 ) . reshape ( out_degree . size ) in_degree = np . array ( in_degree ) . astype ( np . float64 ) . reshape ( in_degree . size ) diag = np . zeros_like ( out_degree ) diag [ labelled_nodes ] = 1.0 diag = sparse . dia_matrix ( ( diag , [ 0 ] ) , shape = ( in_degree . size , in_degree . size ) ) diag = sparse . csr_matrix ( diag ) rw_transition [ labelled_nodes , : ] = ( 1 - label_absorption_probability ) * rw_transition [ labelled_nodes , : ] + label_absorption_probability * diag [ labelled_nodes , : ] return rw_transition , out_degree , in_degree | Returns the label - absorbing random walk transition probability matrix . |
47,512 | def get_natural_random_walk_matrix ( adjacency_matrix , make_shared = False ) : rw_transition = sparse . csr_matrix ( adjacency_matrix , dtype = np . float64 , copy = True ) out_degree = rw_transition . sum ( axis = 1 ) in_degree = rw_transition . sum ( axis = 0 ) out_degree [ out_degree == 0.0 ] = 1.0 for i in np . arange ( rw_transition . shape [ 0 ] ) : rw_transition . data [ rw_transition . indptr [ i ] : rw_transition . indptr [ i + 1 ] ] = rw_transition . data [ rw_transition . indptr [ i ] : rw_transition . indptr [ i + 1 ] ] / out_degree [ i ] rw_transition . sort_indices ( ) out_degree = np . array ( out_degree ) . astype ( np . float64 ) . reshape ( out_degree . size ) in_degree = np . array ( in_degree ) . astype ( np . float64 ) . reshape ( in_degree . size ) if make_shared : number_of_nodes = adjacency_matrix . shape [ 0 ] out_degree_c = mp . Array ( c . c_double , number_of_nodes ) in_degree_c = mp . Array ( c . c_double , number_of_nodes ) out_degree_shared = np . frombuffer ( out_degree_c . get_obj ( ) , dtype = np . float64 , count = number_of_nodes ) in_degree_shared = np . frombuffer ( in_degree_c . get_obj ( ) , dtype = np . float64 , count = number_of_nodes ) out_degree_shared [ : ] = out_degree [ : ] in_degree_shared [ : ] = in_degree [ : ] indices_c = mp . Array ( c . c_int64 , rw_transition . indices . size ) indptr_c = mp . Array ( c . c_int64 , rw_transition . indptr . size ) data_c = mp . Array ( c . c_double , rw_transition . data . size ) indices_shared = np . frombuffer ( indices_c . get_obj ( ) , dtype = np . int64 , count = rw_transition . indices . size ) indptr_shared = np . frombuffer ( indptr_c . get_obj ( ) , dtype = np . int64 , count = rw_transition . indptr . size ) data_shared = np . frombuffer ( data_c . get_obj ( ) , dtype = np . float64 , count = rw_transition . data . size ) indices_shared [ : ] = rw_transition . indices [ : ] indptr_shared [ : ] = rw_transition . indptr [ : ] data_shared [ : ] = rw_transition . data [ : ] rw_transition = sparse . csr_matrix ( ( data_shared , indices_shared , indptr_shared ) , shape = rw_transition . shape ) return rw_transition , out_degree , in_degree | Returns the natural random walk transition probability matrix given the adjacency matrix . |
47,513 | def start ( st_reg_number ) : divisor = 11 if len ( st_reg_number ) > 9 : return False if len ( st_reg_number ) < 9 : return False sum_total = 0 peso = 8 for i in range ( len ( st_reg_number ) - 2 ) : sum_total = sum_total + int ( st_reg_number [ i ] ) * peso peso = peso - 1 rest_division = sum_total % divisor digit_first = divisor - rest_division if rest_division < 2 : digit_first = 0 if rest_division > 1 : digit_first = 11 - rest_division num = 0 peso = 9 mult = 10000000 for i in range ( len ( st_reg_number ) - 2 ) : num = num + int ( st_reg_number [ i ] ) * mult mult = mult / 10 num = num + digit_first new_st = str ( num ) sum_total = 0 peso = 9 for i in range ( len ( new_st ) - 1 ) : sum_total = sum_total + int ( new_st [ i ] ) * peso peso = peso - 1 rest_division = sum_total % divisor if rest_division < 2 : digit_secund = 0 if rest_division > 1 : digit_secund = divisor - rest_division if digit_secund == int ( st_reg_number [ len ( st_reg_number ) - 1 ] ) and digit_first == int ( st_reg_number [ len ( st_reg_number ) - 2 ] ) : return True else : return False | Checks the number valiaty for the Pernanbuco state |
47,514 | def start ( st_reg_number ) : weights_second_digit = range ( len ( st_reg_number ) - 1 , 1 , - 1 ) weights_first_digit = range ( len ( st_reg_number ) , 1 , - 1 ) second_digits = st_reg_number [ - 1 : ] number_state_registration = st_reg_number [ 0 : len ( st_reg_number ) - 2 ] digits_state_registration = st_reg_number [ - 2 : ] sum_second_digit = 0 sum_first_digit = 0 if len ( st_reg_number ) != 8 and len ( st_reg_number ) != 9 : return False if st_reg_number [ - 8 ] in [ '0' , '1' , '2' , '3' , '4' , '5' , '8' ] : for i in weights_second_digit : sum_second_digit = sum_second_digit + i * int ( st_reg_number [ - i - 1 ] ) second_digits_check = 10 - ( sum_second_digit % 10 ) if sum_second_digit % 10 == 0 or sum_second_digit % 11 == 1 : second_digits_check = '0' if str ( second_digits_check ) != second_digits : return False digit_two = number_state_registration + str ( second_digits_check ) for i in weights_first_digit : sum_first_digit = sum_first_digit + i * int ( digit_two [ - i + 1 ] ) first_digits_check = 10 - ( sum_first_digit % 10 ) if sum_first_digit % 10 == 0 or sum_first_digit % 10 == 1 : first_digits_check = '0' digits_calculated = str ( first_digits_check ) + str ( second_digits_check ) return digits_calculated == digits_state_registration elif st_reg_number [ - 8 ] in [ '6' , '7' , '9' ] : for i in weights_second_digit : sum_second_digit = sum_second_digit + i * int ( st_reg_number [ - i - 1 ] ) second_digits_check = 11 - ( sum_second_digit % 11 ) if sum_second_digit % 11 == 0 or sum_second_digit % 11 == 1 : second_digits_check = '0' if str ( second_digits_check ) != second_digits : return False digit_two = number_state_registration + str ( second_digits_check ) for i in weights_first_digit : sum_first_digit = sum_first_digit + i * int ( digit_two [ - i + 1 ] ) first_digits_check = 11 - ( sum_first_digit % 11 ) if sum_first_digit % 11 == 0 or sum_first_digit % 11 == 1 : first_digits_check = '0' digits_calculated = str ( first_digits_check ) + str ( second_digits_check ) return digits_calculated == digits_state_registration | Checks the number valiaty for the Bahia state |
47,515 | async def send_http_response ( writer , http_code : int , headers : List [ Tuple [ str , str ] ] , content : bytes , http_status : str = None ) -> None : if not http_status : http_status = STATUS_CODES . get ( http_code , 'Unknown' ) response : bytes = f'HTTP/1.1 {http_code} {http_status}\r\n' . encode ( ) for k , v in headers : response += f'{k}: {v}\r\n' . encode ( ) response += b'\r\n' response += content writer . write ( response ) await writer . drain ( ) | generate http response payload and send to writer |
47,516 | def start ( st_reg_number ) : weights = [ 9 , 8 , 7 , 6 , 5 , 4 , 3 , 2 ] digit_state_registration = st_reg_number [ - 1 ] if len ( st_reg_number ) != 9 : return False sum_total = 0 for i in range ( 0 , 8 ) : sum_total = sum_total + weights [ i ] * int ( st_reg_number [ i ] ) if sum_total % 11 == 0 : return digit_state_registration [ - 1 ] == '0' digit_check = 11 - sum_total % 11 return str ( digit_check ) == digit_state_registration | Checks the number valiaty for the Paraiba state |
47,517 | def start ( st_reg_number ) : weights = [ 3 , 2 , 9 , 8 , 7 , 6 , 5 , 4 , 3 , 2 ] digit_state_registration = st_reg_number [ - 1 ] if len ( st_reg_number ) != 11 : return False sum = 0 for i in range ( 0 , 10 ) : sum = sum + weights [ i ] * int ( st_reg_number [ i ] ) if sum % 11 == 0 : return digit_state_registration [ - 1 ] == '0' digit_check = 11 - sum % 11 return str ( digit_check ) == digit_state_registration | Checks the number valiaty for the Mato Grosso state |
47,518 | def start ( st_reg_number ) : divisor = 11 if len ( st_reg_number ) > 9 : return False if len ( st_reg_number ) < 9 : return False sum_total = 0 peso = 9 for i in range ( len ( st_reg_number ) - 1 ) : sum_total = sum_total + int ( st_reg_number [ i ] ) * peso peso = peso - 1 rest_division = sum_total % divisor digit = divisor - rest_division if digit == 10 or digit == 11 : digit = 0 return digit == int ( st_reg_number [ len ( st_reg_number ) - 1 ] ) | Checks the number valiaty for the Sergipe state |
47,519 | def start ( st_reg_number ) : weights = [ 9 , 8 , 7 , 6 , 5 , 4 , 3 , 2 ] number_state_registration = st_reg_number [ 0 : len ( st_reg_number ) - 1 ] digit_state_registration = st_reg_number [ - 1 ] if st_reg_number [ 0 : 2 ] not in [ '10' , '11' , '12' ] : return False if len ( st_reg_number ) != 9 : return False sum_total = 0 for i in weights : sum_total = sum_total + i * ( int ( number_state_registration [ - i + 1 ] ) ) check_number = number_state_registration <= 10119997 if sum_total % 11 == 0 : return '0' == digit_state_registration elif sum_total % 11 == 1 and ( int ( number_state_registration ) >= 10103105 and check_number ) : return '1' == digit_state_registration elif sum_total % 11 == 1 : return '0' == digit_state_registration else : digit_check = 11 - sum_total % 11 return digit_state_registration == str ( digit_check ) if number_state_registration == '11094402' and ( number_state_registration == '1' or number_state_registration == '0' ) : return True | Checks the number valiaty for the Espirito Santo state |
47,520 | def start ( st_reg_number ) : weights = [ 4 , 3 , 2 , 9 , 8 , 7 , 6 , 5 , 4 , 3 , 2 ] digits = st_reg_number [ : len ( st_reg_number ) - 2 ] check_digits = st_reg_number [ - 2 : ] divisor = 11 if len ( st_reg_number ) > 13 : return False sum_total = 0 for i in range ( len ( digits ) ) : sum_total = sum_total + int ( digits [ i ] ) * weights [ i ] rest_division = sum_total % divisor first_digit = divisor - rest_division if first_digit == 10 or first_digit == 11 : first_digit = 0 if str ( first_digit ) != check_digits [ 0 ] : return False digits = digits + str ( first_digit ) weights = [ 5 ] + weights sum_total = 0 for i in range ( len ( digits ) ) : sum_total = sum_total + int ( digits [ i ] ) * weights [ i ] rest_division = sum_total % divisor second_digit = divisor - rest_division if second_digit == 10 or second_digit == 11 : second_digit = 0 return str ( first_digit ) + str ( second_digit ) == check_digits | Checks the number valiaty for the Acre state |
47,521 | def start ( st_reg_number ) : number_state_registration_first_digit = st_reg_number [ 0 : 3 ] + '0' + st_reg_number [ 3 : len ( st_reg_number ) - 2 ] weights_first_digit = [ 1 , 2 , 1 , 2 , 1 , 2 , 1 , 2 , 1 , 2 , 1 , 2 ] wights_second_digit = [ 3 , 2 , 11 , 10 , 9 , 8 , 7 , 6 , 5 , 4 , 3 , 2 ] first_digit = st_reg_number [ - 2 ] second_digit = st_reg_number [ - 1 ] sum_first_digit = 0 sum_second_digit = 0 sum_result_digit = '' sum_end = 0 if len ( st_reg_number ) != 13 : return False for i in range ( 0 , 12 ) : sum_first_digit = weights_first_digit [ i ] * int ( number_state_registration_first_digit [ i ] ) sum_result_digit = sum_result_digit + str ( sum_first_digit ) for i in range ( 0 , len ( sum_result_digit ) ) : sum_end = sum_end + int ( sum_result_digit [ i ] ) if sum_end % 10 == 0 : check_digit_one = 0 elif sum_end < 10 : check_digit_one = 10 - sum_end elif sum_end > 10 : check_digit_one = ( 10 - sum_end % 10 ) if str ( check_digit_one ) != first_digit : return False number_state_registration_second_digit = st_reg_number + str ( check_digit_one ) for i in range ( 0 , 12 ) : sum_second_digit = sum_second_digit + wights_second_digit [ i ] * int ( number_state_registration_second_digit [ i ] ) check_second_digit = 11 - sum_second_digit % 11 if sum_second_digit == 1 or sum_second_digit == 0 : return second_digit == '0' else : return str ( check_second_digit ) == second_digit | Checks the number valiaty for the Minas Gerais state |
47,522 | def start ( st_reg_number ) : weights = range ( 2 , 10 ) digits = st_reg_number [ 0 : len ( st_reg_number ) - 1 ] control_digit = 11 check_digit = st_reg_number [ - 1 : ] if len ( st_reg_number ) != 9 : return False sum_total = 0 for i in weights : sum_total = sum_total + i * int ( digits [ i - 2 ] ) if sum_total < control_digit : control_digit = 11 - sum_total return str ( digit_calculated ) == check_digit elif sum_total % 11 <= 1 : return '0' == check_digit else : digit_calculated = 11 - sum_total % 11 return str ( digit_calculated ) == check_digit | Checks the number valiaty for the Amazonas state |
47,523 | def start ( state_registration_number , state_abbreviation ) : state_abbreviation = state_abbreviation . upper ( ) states_validations = { 'AC' : "ac.start(" + "\"" + state_registration_number + "\"" + ")" , 'AL' : "al.start(" + "\"" + state_registration_number + "\"" + ")" , 'AM' : "am.start(" + "\"" + state_registration_number + "\"" + ")" , 'AP' : "ap.start(" + "\"" + state_registration_number + "\"" + ")" , 'BA' : "ba.start(" + "\"" + state_registration_number + "\"" + ")" , 'CE' : "ce.start(" + "\"" + state_registration_number + "\"" + ")" , 'DF' : "df.start(" + "\"" + state_registration_number + "\"" + ")" , 'ES' : "es.start(" + "\"" + state_registration_number + "\"" + ")" , 'GO' : "go.start(" + "\"" + state_registration_number + "\"" + ")" , 'MA' : "ma.start(" + "\"" + state_registration_number + "\"" + ")" , 'MG' : "mg.start(" + "\"" + state_registration_number + "\"" + ")" , 'MS' : "ms.start(" + "\"" + state_registration_number + "\"" + ")" , 'MT' : "mt.start(" + "\"" + state_registration_number + "\"" + ")" , 'PA' : "pa.start(" + "\"" + state_registration_number + "\"" + ")" , 'PB' : "pb.start(" + "\"" + state_registration_number + "\"" + ")" , 'PE' : "pe.start(" + "\"" + state_registration_number + "\"" + ")" , 'PI' : "pi.start(" + "\"" + state_registration_number + "\"" + ")" , 'PR' : "pr.start(" + "\"" + state_registration_number + "\"" + ")" , 'RJ' : "rj.start(" + "\"" + state_registration_number + "\"" + ")" , 'RN' : "rn.start(" + "\"" + state_registration_number + "\"" + ")" , 'RO' : "ro.start(" + "\"" + state_registration_number + "\"" + ")" , 'RR' : "rr.start(" + "\"" + state_registration_number + "\"" + ")" , 'RS' : "rs.start(" + "\"" + state_registration_number + "\"" + ")" , 'SC' : "sc.start(" + "\"" + state_registration_number + "\"" + ")" , 'SE' : "se.start(" + "\"" + state_registration_number + "\"" + ")" , 'SP' : "sp.start(" + "\"" + state_registration_number + "\"" + ")" , 'TO' : "to.start(" + "\"" + state_registration_number + "\"" + ")" } exec ( 'validity = ' + states_validations [ state_abbreviation ] ) return validity | This function is like a Facade to another modules that makes their own state validation . |
47,524 | def to_dict ( self ) : def exception_to_dict ( e ) : try : return e . to_dict ( ) except AttributeError : return { "type" : e . __class__ . __name__ , "error" : str ( e ) , } result = { "errors" : [ exception_to_dict ( e ) for e in self . errors ] } if self . index is not None : result [ "index" ] = self . index else : result [ "attr" ] = self . attr if self . attr is not None else "<root>" return result | Return a dictionary representation of the error . |
47,525 | def _sorted_actions ( self ) : for a in filter ( lambda _ : not _ . last and not self . is_action ( _ , 'parsers' ) , self . _actions ) : yield a for a in filter ( lambda _ : _ . last and not self . is_action ( _ , 'parsers' ) , self . _actions ) : yield a for a in filter ( lambda _ : self . is_action ( _ , 'parsers' ) , self . _actions ) : yield a | Generate the sorted list of actions based on the last attribute . |
47,526 | def demo_args ( self ) : argv = random . choice ( self . examples ) . replace ( "--demo" , "" ) self . _reparse_args [ 'pos' ] = shlex . split ( argv ) | Additional method for replacing input arguments by demo ones . |
47,527 | def output ( f ) : def wrapper ( self , * args , ** kwargs ) : try : text = kwargs . get ( 'text' ) or args [ 0 ] except IndexError : text = True _ = f ( self , * args , ** kwargs ) if text : return _ elif _ is not None and isinstance ( _ , string_types ) : filename = "{}.{}" . format ( self . filename , f . __name__ ) while exists ( filename ) : name , ext = splitext ( filename ) try : name , i = name . split ( '-' ) i = int ( i ) + 1 except ValueError : i = 2 filename = "{}-{}" . format ( name , i ) + ext with open ( filename , 'w' ) as out : out . write ( _ ) return wrapper | This decorator allows to choose to return an output as text or to save it to a file . |
47,528 | def html ( self , text = TEXT ) : self . logger . debug ( "Generating the HTML report{}..." . format ( [ "" , " (text only)" ] [ text ] ) ) html = [ ] for piece in self . _pieces : if isinstance ( piece , string_types ) : html . append ( markdown2 . markdown ( piece , extras = [ "tables" ] ) ) elif isinstance ( piece , Element ) : html . append ( piece . html ( ) ) return "\n\n" . join ( html ) | Generate an HTML file from the report data . |
47,529 | def pdf ( self , text = TEXT ) : self . logger . debug ( "Generating the PDF report..." ) html = HTML ( string = self . html ( ) ) css_file = self . css or join ( dirname ( abspath ( __file__ ) ) , "{}.css" . format ( self . theme ) ) css = [ css_file , CSS ( string = PAGE_CSS % self . __dict__ ) ] html . write_pdf ( "{}.pdf" . format ( self . filename ) , stylesheets = css ) | Generate a PDF file from the report data . |
47,530 | def csv ( self , text = TEXT , sep = ',' , index = True , float_fmt = "%.2g" ) : return self . _data . to_csv ( sep = sep , index = index , float_format = float_fmt ) | Generate a CSV table from the table data . |
47,531 | def md ( self , text = TEXT , float_format = "%.2g" ) : cols = self . _data . columns hl = pd . DataFrame ( [ [ "---" ] * len ( cols ) ] , index = [ "---" ] , columns = cols ) df = pd . concat ( [ hl , self . _data ] ) return df . to_csv ( sep = '|' , index = True , float_format = float_format ) | Generate Markdown from the table data . |
47,532 | def xml ( self , text = TEXT ) : def convert ( line ) : xml = " <item>\n" for f in line . index : xml += " <field name=\"%s\">%s</field>\n" % ( f , line [ f ] ) xml += " </item>\n" return xml return "<items>\n" + '\n' . join ( self . _data . apply ( convert , axis = 1 ) ) + "</items>" | Generate an XML output from the report data . |
47,533 | def from_argspec ( argspec ) : attributes = getattr ( argspec , "args" , [ ] ) + getattr ( argspec , "keywords" , [ ] ) defaults = argspec . defaults or [ ] arguments , keywords = [ ] , { } attribute_list = ( attributes [ : - len ( defaults ) ] if len ( defaults ) != 0 else attributes [ : ] ) for name in attribute_list : if name == "self" : continue typ = argspec . annotations . get ( name ) arguments . append ( Argument ( name , NoDefault , typ ) ) if len ( defaults ) != 0 : for name , default in zip ( attributes [ - len ( defaults ) : ] , defaults ) : typ = argspec . annotations . get ( name ) keywords [ name ] = Argument ( name , default , typ ) return FunctionSignature ( arguments , keywords ) | retrieve a FunctionSignature object from the argspec and the annotations passed . |
47,534 | def split_args ( self , arg_dict ) : pos_args = [ ] for arg in self . args : pos_args . append ( arg_dict [ arg . name ] ) del arg_dict [ arg . name ] return pos_args , arg_dict | given a dictionary of arguments split them into args and kwargs |
47,535 | def pretty_format_args ( * args , ** kwargs ) : args = list ( [ repr ( a ) for a in args ] ) for key , value in kwargs . items ( ) : args . append ( "%s=%s" % ( key , repr ( value ) ) ) return "(%s)" % ", " . join ( [ a for a in args ] ) | Take the args and kwargs that are passed them and format in a prototype style . |
47,536 | def render ( file ) : with file . open ( ) as fp : encoding = detect_encoding ( fp , default = 'utf-8' ) file_content = fp . read ( ) . decode ( encoding ) parsed_xml = xml . dom . minidom . parseString ( file_content ) return parsed_xml . toprettyxml ( indent = ' ' , newl = '' ) | Pretty print the XML file for rendering . |
47,537 | def validate_xml ( file ) : max_file_size = current_app . config . get ( 'PREVIEWER_MAX_FILE_SIZE_BYTES' , 1 * 1024 * 1024 ) if file . size > max_file_size : return False with file . open ( ) as fp : try : content = fp . read ( ) . decode ( 'utf-8' ) xml . dom . minidom . parseString ( content ) return True except : return False | Validate an XML file . |
47,538 | def _get_context ( argspec , kwargs ) : if argspec . keywords is not None : return kwargs return dict ( ( arg , kwargs [ arg ] ) for arg in argspec . args if arg in kwargs ) | Prepare a context for the serialization . |
47,539 | def get ( self , obj , ** kwargs ) : assert self . getter is not None , "Getter accessor is not specified." if callable ( self . getter ) : return self . getter ( obj , ** _get_context ( self . _getter_argspec , kwargs ) ) assert isinstance ( self . getter , string_types ) , "Accessor must be a function or a dot-separated string." for attr in self . getter . split ( "." ) : if isinstance ( obj , dict ) : obj = obj [ attr ] else : obj = getattr ( obj , attr ) if callable ( obj ) : return obj ( ) return obj | Get an attribute from a value . |
47,540 | def set ( self , obj , value ) : assert self . setter is not None , "Setter accessor is not specified." if callable ( self . setter ) : return self . setter ( obj , value ) assert isinstance ( self . setter , string_types ) , "Accessor must be a function or a dot-separated string." def _set ( obj , attr , value ) : if isinstance ( obj , dict ) : obj [ attr ] = value else : setattr ( obj , attr , value ) return value path = self . setter . split ( "." ) for attr in path [ : - 1 ] : obj = _set ( obj , attr , { } ) _set ( obj , path [ - 1 ] , value ) | Set value for obj s attribute . |
47,541 | def accessor ( self ) : if isinstance ( self . attr , Accessor ) : return self . attr if callable ( self . attr ) : return Accessor ( getter = self . attr ) attr = self . attr or self . name return Accessor ( getter = attr , setter = attr ) | Get an attribute s accessor with the getter and the setter . |
47,542 | def serialize ( self , value , ** kwargs ) : if types . Type . is_type ( self . attr_type ) : try : value = self . accessor . get ( value , ** kwargs ) except ( AttributeError , KeyError ) : if not hasattr ( self , "default" ) and self . required : raise value = self . default ( ) if callable ( self . default ) else self . default return self . attr_type . serialize ( value , ** _get_context ( self . _attr_type_serialize_argspec , kwargs ) ) return self . attr_type | Serialize the attribute of the input data . |
47,543 | def deserialize ( self , value , ** kwargs ) : compartment = value if self . compartment is not None : compartment = value [ self . compartment ] try : value = self . accessor . get ( compartment , ** kwargs ) except ( KeyError , AttributeError ) : if not hasattr ( self , "default" ) and self . required : raise return self . default ( ) if callable ( self . default ) else self . default return self . attr_type . deserialize ( value , ** kwargs ) | Deserialize the attribute from a HAL structure . |
47,544 | def key ( self ) : if self . curie is None : return self . name return ":" . join ( ( self . curie . name , self . name ) ) | Embedded supports curies . |
47,545 | def deserialize ( cls , value , output = None , ** kwargs ) : errors = [ ] result = { } for attr in cls . __attrs__ . values ( ) : try : result [ attr . name ] = attr . deserialize ( value , ** kwargs ) except NotImplementedError : continue except ValueError as e : errors . append ( exceptions . ValidationError ( e , attr . name ) ) except exceptions . ValidationError as e : e . attr = attr . name errors . append ( e ) except ( KeyError , AttributeError ) : if attr . required : errors . append ( exceptions . ValidationError ( "Missing attribute." , attr . name ) ) if errors : raise exceptions . ValidationError ( errors ) if output is None : return result for attr in cls . __attrs__ . values ( ) : if attr . name in result : attr . accessor . set ( output , result [ attr . name ] ) | Deserialize the HAL structure into the output value . |
47,546 | def neg_int ( i ) : try : if isinstance ( i , string_types ) : i = int ( i ) if not isinstance ( i , int ) or i > 0 : raise Exception ( ) except : raise ValueError ( "Not a negative integer" ) return i | Simple negative integer validation . |
47,547 | def pos_int ( i ) : try : if isinstance ( i , string_types ) : i = int ( i ) if not isinstance ( i , int ) or i < 0 : raise Exception ( ) except : raise ValueError ( "Not a positive integer" ) return i | Simple positive integer validation . |
47,548 | def ints ( l , ifilter = lambda x : x , idescr = None ) : if isinstance ( l , string_types ) : if l [ 0 ] == '[' and l [ - 1 ] == ']' : l = l [ 1 : - 1 ] l = list ( map ( lambda x : x . strip ( ) , l . split ( ',' ) ) ) try : l = list ( map ( ifilter , list ( map ( int , l ) ) ) ) except : raise ValueError ( "Bad list of {}integers" . format ( "" if idescr is None else idescr + " " ) ) return l | Parses a comma - separated list of ints . |
47,549 | def ip_address_list ( ips ) : try : return ip_address ( ips ) except ValueError : pass return list ( ipaddress . ip_network ( u ( ips ) ) . hosts ( ) ) | IP address range validation and expansion . |
47,550 | def port_number ( port ) : try : port = int ( port ) except ValueError : raise ValueError ( "Bad port number" ) if not 0 <= port < 2 ** 16 : raise ValueError ( "Bad port number" ) return port | Port number validation . |
47,551 | def port_number_range ( prange ) : try : return port_number ( prange ) except ValueError : pass try : bounds = list ( map ( int , re . match ( r'^(\d+)\-(\d+)$' , prange ) . groups ( ) ) ) if bounds [ 0 ] > bounds [ 1 ] : raise AttributeError ( ) except ( AttributeError , TypeError ) : raise ValueError ( "Bad port number range" ) return list ( range ( bounds [ 0 ] , bounds [ 1 ] + 1 ) ) | Port number range validation and expansion . |
47,552 | def parse_header_part ( self , data ) : packet_length = data [ 0 ] packet_type = data [ 1 ] packet_subtype = data [ 2 ] sequence_number = data [ 3 ] return { 'packet_length' : packet_length , 'packet_type' : packet_type , 'packet_type_name' : self . PACKET_TYPES . get ( packet_type ) , 'packet_subtype' : packet_subtype , 'packet_subtype_name' : self . PACKET_SUBTYPES . get ( packet_subtype ) , 'sequence_number' : sequence_number } | Extracts and converts the RFX common header part of all valid packets to a plain dictionary . RFX header part is the 4 bytes prior the sensor vendor specific data part . |
47,553 | def load ( self , data ) : self . loaded_at = datetime . utcnow ( ) self . raw = data self . data = self . parse ( data ) return self . data | This is the entrance method for all data which is used to store the raw data and start parsing the data . |
47,554 | def validate_packet ( self , data ) : expected_length = data [ 0 ] + 1 if len ( data ) != expected_length : raise InvalidPacketLength ( "Expected packet length to be %s bytes but it was %s bytes" % ( expected_length , len ( data ) ) ) if expected_length < 4 : raise MalformedPacket ( "Expected packet length to be larger than 4 bytes but \ it was %s bytes" % ( len ( data ) ) ) packet_type = data [ 1 ] if self . PACKET_TYPES and packet_type not in self . PACKET_TYPES : types = "," . join ( "0x{:02x}" . format ( pt ) for pt in self . PACKET_TYPES ) raise UnknownPacketType ( "Expected packet type to be one of [%s] but recieved %s" % ( types , packet_type ) ) sub_type = data [ 2 ] if self . PACKET_SUBTYPES and sub_type not in self . PACKET_SUBTYPES : types = "," . join ( "0x{:02x}" . format ( pt ) for pt in self . PACKET_SUBTYPES ) raise UnknownPacketSubtype ( "Expected packet type to be one of [%s] but recieved %s" % ( types , sub_type ) ) return True | Validate a packet against this packet handler and determine if it meets the requirements . This is done by checking the following conditions are true . |
47,555 | def _enforce_instance ( model_or_class ) : if isinstance ( model_or_class , type ) and issubclass ( model_or_class , BaseType ) : return model_or_class ( ) return model_or_class | It s a common mistake to not initialize a schematics class . We should handle that by just calling the default constructor . |
47,556 | def render ( file ) : fp = file . open ( ) content = fp . read ( ) fp . close ( ) notebook = nbformat . reads ( content . decode ( 'utf-8' ) , as_version = 4 ) html_exporter = HTMLExporter ( ) html_exporter . template_file = 'basic' ( body , resources ) = html_exporter . from_notebook_node ( notebook ) return body , resources | Generate the result HTML . |
47,557 | def preview ( file ) : body , resources = render ( file ) default_ipython_style = resources [ 'inlining' ] [ 'css' ] [ 1 ] return render_template ( 'invenio_previewer/ipynb.html' , file = file , content = body , style = default_ipython_style ) | Render the IPython Notebook . |
47,558 | def transmute_route ( app , fn , context = default_context ) : transmute_func = TransmuteFunction ( fn ) routes , handler = create_routes_and_handler ( transmute_func , context ) for r in routes : if not hasattr ( app , SWAGGER_ATTR_NAME ) : setattr ( app , SWAGGER_ATTR_NAME , SwaggerSpec ( ) ) swagger_obj = getattr ( app , SWAGGER_ATTR_NAME ) swagger_obj . add_func ( transmute_func , context ) app . route ( r , methods = transmute_func . methods ) ( handler ) | this is the main interface to transmute . It will handle adding converting the python function into the a flask - compatible route and adding it to the application . |
47,559 | def create_routes_and_handler ( transmute_func , context ) : @ wraps ( transmute_func . raw_func ) def handler ( ) : exc , result = None , None try : args , kwargs = ParamExtractorFlask ( ) . extract_params ( context , transmute_func , request . content_type ) result = transmute_func ( * args , ** kwargs ) except Exception as e : exc = e exc . __traceback__ = sys . exc_info ( ) [ 2 ] response = transmute_func . process_result ( context , result , exc , request . content_type ) return Response ( response [ "body" ] , status = response [ "code" ] , mimetype = response [ "content-type" ] , headers = response [ "headers" ] ) return ( _convert_paths_to_flask ( transmute_func . paths ) , handler ) | return back a handler that is the api generated from the transmute_func and a list of routes it should be mounted to . |
47,560 | def add_swagger ( app , json_route , html_route , ** kwargs ) : spec = getattr ( app , SWAGGER_ATTR_NAME ) if spec : spec = spec . swagger_definition ( ** kwargs ) else : spec = { } encoded_spec = json . dumps ( spec ) . encode ( "UTF-8" ) @ app . route ( json_route ) def swagger ( ) : return Response ( encoded_spec , headers = { "Access-Control-Allow-Origin" : "*" } , content_type = "application/json" , ) static_root = get_swagger_static_root ( ) swagger_body = generate_swagger_html ( STATIC_PATH , json_route ) . encode ( "utf-8" ) @ app . route ( html_route ) def swagger_ui ( ) : return Response ( swagger_body , content_type = "text/html" ) blueprint = Blueprint ( 'swagger' , __name__ , static_url_path = STATIC_PATH , static_folder = static_root ) app . register_blueprint ( blueprint ) | add a swagger html page and a swagger . json generated from the routes added to the app . |
47,561 | def _log_enabled_protocols ( self , flags , protocols ) : enabled , disabled = [ ] , [ ] for procol , flag in sorted ( zip ( protocols , flags ) ) : if flag == '1' : enabled . append ( procol ) status = 'Enabled' else : disabled . append ( procol ) status = 'Disabled' message = "{0:21}: {1}" . format ( procol , status ) self . log . info ( message ) return enabled , disabled | Given a list of single character strings of 1 s and 0 s and a list of protocol names . Log the status of each protocol where 1 is enabled and 0 is disabled . The order of the lists here is important as they need to be zipped together to create the mapping . Then return a tuple of two lists containing the names of the enabled and disabled protocols . |
47,562 | def parse ( self , data ) : self . validate_packet ( data ) packet_length = data [ 0 ] packet_type = data [ 1 ] sub_type = data [ 2 ] sequence_number = data [ 3 ] command_type = data [ 4 ] transceiver_type = data [ 5 ] transceiver_type_text = _MSG1_RECEIVER_TYPE . get ( data [ 5 ] ) firmware_version = data [ 6 ] flags = self . _int_to_binary_list ( data [ 7 ] ) flags . extend ( self . _int_to_binary_list ( data [ 8 ] ) ) flags . extend ( self . _int_to_binary_list ( data [ 9 ] ) ) enabled , disabled = self . _log_enabled_protocols ( flags , PROTOCOLS ) return { 'packet_length' : packet_length , 'packet_type' : packet_type , 'packet_type_name' : self . PACKET_TYPES . get ( packet_type ) , 'sequence_number' : sequence_number , 'sub_type' : sub_type , 'sub_type_name' : self . PACKET_SUBTYPES . get ( sub_type ) , 'command_type' : command_type , 'transceiver_type' : transceiver_type , 'transceiver_type_text' : transceiver_type_text , 'firmware_version' : firmware_version , 'enabled_protocols' : enabled , 'disabled_protocols' : disabled , } | Parse a 13 byte packet in the Status format . |
47,563 | def _capture_variable ( iterator , parameters ) : key = "" next_c = next ( iterator ) while next_c != "}" : key += next_c next_c = next ( iterator ) next ( iterator ) return parameters [ key ] | return the replacement string . this assumes the preceeding {{ has already been popped off . |
47,564 | def rgb_distance ( rgb1 , rgb2 ) : return sum ( map ( lambda c : ( c [ 0 ] - c [ 1 ] ) ** 2 , zip ( rgb1 , rgb2 ) ) ) | Calculate the distance between two RGB sequences . |
47,565 | def rgb_reduce ( r , g , b , mode = 8 ) : colours = ANSI_COLOURS [ : mode ] matches = [ ( rgb_distance ( c , map ( int , [ r , g , b ] ) ) , i ) for i , c in enumerate ( colours ) ] matches . sort ( ) return sequence ( 'm' ) ( str ( 30 + matches [ 0 ] [ 1 ] ) ) | Convert an RGB colour to 8 or 16 colour ANSI graphics . |
47,566 | def rgb256 ( r , g , b ) : grey = False poss = True step = 2.5 while poss : if r < step or g < step or b < step : grey = r < step and g < step and b < step poss = False step += 42.5 if grey : colour = 232 + int ( float ( sum ( [ r , g , b ] ) / 33.0 ) ) else : colour = sum ( [ 16 ] + [ int ( 6 * float ( val ) / 256 ) * mod for val , mod in ( ( r , 36 ) , ( g , 6 ) , ( b , 1 ) ) ] ) return sequence ( 'm' , fields = 3 ) ( 38 , 5 , colour ) | Convert an RGB colour to 256 colour ANSI graphics . |
47,567 | def uri ( self ) : return url_for ( '.{0}_files' . format ( self . pid . pid_type ) , pid_value = self . pid . pid_value , filename = self . file . key ) | Get file download link . |
47,568 | def has_extensions ( self , * exts ) : file_ext = splitext ( self . filename ) [ 1 ] file_ext = file_ext . lower ( ) for e in exts : if file_ext == e : return True return False | Check if file has one of the extensions . |
47,569 | def render ( file ) : with file . open ( ) as fp : encoding = detect_encoding ( fp , default = 'utf-8' ) file_content = fp . read ( ) . decode ( encoding ) json_data = json . loads ( file_content , object_pairs_hook = OrderedDict ) return json . dumps ( json_data , indent = 4 , separators = ( ',' , ': ' ) ) | Pretty print the JSON file for rendering . |
47,570 | def validate_json ( file ) : max_file_size = current_app . config . get ( 'PREVIEWER_MAX_FILE_SIZE_BYTES' , 1 * 1024 * 1024 ) if file . size > max_file_size : return False with file . open ( ) as fp : try : json . loads ( fp . read ( ) . decode ( 'utf-8' ) ) return True except : return False | Validate a JSON file . |
47,571 | def optspace ( edm_missing , rank , niter = 500 , tol = 1e-6 , print_out = False ) : from . opt_space import opt_space N = edm_missing . shape [ 0 ] X , S , Y , __ = opt_space ( edm_missing , r = rank , niter = niter , tol = tol , print_out = print_out ) edm_complete = X . dot ( S . dot ( Y . T ) ) edm_complete [ range ( N ) , range ( N ) ] = 0.0 return edm_complete | Complete and denoise EDM using OptSpace algorithm . |
47,572 | def rank_alternation ( edm_missing , rank , niter = 50 , print_out = False , edm_true = None ) : from pylocus . basics import low_rank_approximation errs = [ ] N = edm_missing . shape [ 0 ] edm_complete = edm_missing . copy ( ) edm_complete [ edm_complete == 0 ] = np . mean ( edm_complete [ edm_complete > 0 ] ) for i in range ( niter ) : edm_complete = low_rank_approximation ( edm_complete , rank ) edm_complete [ edm_missing > 0 ] = edm_missing [ edm_missing > 0 ] edm_complete [ range ( N ) , range ( N ) ] = 0.0 edm_complete [ edm_complete < 0 ] = 0.0 edm_complete = 0.5 * ( edm_complete + edm_complete . T ) if edm_true is not None : err = np . linalg . norm ( edm_complete - edm_true ) errs . append ( err ) return edm_complete , errs | Complete and denoise EDM using rank alternation . |
47,573 | def completion_acd ( edm , X0 , W = None , tol = 1e-6 , sweeps = 3 ) : from . algorithms import reconstruct_acd Xhat , costs = reconstruct_acd ( edm , X0 , W , tol = tol , sweeps = sweeps ) return get_edm ( Xhat ) | Complete an denoise EDM using alternating decent . |
47,574 | def completion_dwmds ( edm , X0 , W = None , tol = 1e-10 , sweeps = 100 ) : from . algorithms import reconstruct_dwmds Xhat , costs = reconstruct_dwmds ( edm , X0 , W , n = 1 , tol = tol , sweeps = sweeps ) return get_edm ( Xhat ) | Complete an denoise EDM using dwMDS . |
47,575 | def url_spec ( transmute_path , handler , * args , ** kwargs ) : p = _to_tornado_pattern ( transmute_path ) for m in METHODS : method = getattr ( handler , m ) if hasattr ( method , "transmute_func" ) : method . transmute_func . paths . add ( transmute_path ) return tornado . web . URLSpec ( p , handler , * args , ** kwargs ) | convert the transmute_path to a tornado compatible regex and return a tornado url object . |
47,576 | def get_orientation ( k , i , j ) : from pylocus . basics_angles import from_0_to_2pi theta_ij = own . abs_angles [ i , j ] theta_ji = own . abs_angles [ j , i ] xi = own . points [ i , 0 ] xj = own . points [ j , 0 ] yi = own . points [ i , 1 ] yj = own . points [ j , 1 ] w = np . array ( [ yi - yj , xj - xi ] ) test = np . dot ( own . points [ k , : ] - own . points [ i , : ] , w ) > 0 theta_ik = truth . abs_angles [ i , k ] diff = from_0_to_2pi ( theta_ik - theta_ij ) test2 = ( diff > 0 and diff < pi ) assert ( test == test2 ) , "diff: %r, scalar prodcut: %r" % ( diff , np . dot ( own . points [ k , : ] - own . points [ i , : ] , w ) ) thetai_jk = truth . get_theta ( i , j , k ) thetaj_ik = truth . get_theta ( j , i , k ) if test : theta_ik = theta_ij + thetai_jk theta_jk = theta_ji - thetaj_ik else : theta_ik = theta_ij - thetai_jk theta_jk = theta_ji + thetaj_ik theta_ik = from_0_to_2pi ( theta_ik ) theta_jk = from_0_to_2pi ( theta_jk ) return theta_ik , theta_jk | calculate angles theta_ik and theta_jk theta produce point Pk . Should give the same as get_absolute_angle! |
47,577 | def get_G ( self , k , add_noise = True ) : G = np . ones ( ( self . N - 1 , self . N - 1 ) ) if ( add_noise ) : noise = pi * 0.1 * np . random . rand ( ( self . N - 1 ) * ( self . N - 1 ) ) . reshape ( ( self . N - 1 , self . N - 1 ) ) other_indices = np . delete ( range ( self . N ) , k ) for idx , i in enumerate ( other_indices ) : for jdx , j in enumerate ( other_indices ) : if ( add_noise and i != j ) : thetak_ij = self . get_inner_angle ( k , ( i , j ) ) + noise [ idx , jdx ] else : thetak_ij = self . get_inner_angle ( k , ( i , j ) ) G [ idx , jdx ] = cos ( thetak_ij ) G [ jdx , idx ] = cos ( thetak_ij ) return G | get G matrix from angles . |
47,578 | def get_KE_constraints ( self ) : C2 = np . eye ( self . m ) C2 = C2 [ : self . m - 2 , : ] to_be_deleted = [ ] for idx_vij_1 in range ( self . m - 2 ) : idx_vij_2 = idx_vij_1 + 1 C2 [ idx_vij_1 , idx_vij_2 ] = - 1 i1 = np . where ( self . C [ idx_vij_1 , : ] == 1 ) [ 0 ] [ 0 ] i2 = np . where ( self . C [ idx_vij_2 , : ] == 1 ) [ 0 ] [ 0 ] j = np . where ( self . C [ idx_vij_1 , : ] == - 1 ) [ 0 ] [ 0 ] if i1 == i2 : i = i1 k = np . where ( self . C [ idx_vij_2 , : ] == - 1 ) [ 0 ] [ 0 ] i_indices = self . C [ : , j ] == 1 j_indices = self . C [ : , k ] == - 1 idx_vij_3 = np . where ( np . bitwise_and ( i_indices , j_indices ) ) [ 0 ] [ 0 ] C2 [ idx_vij_1 , idx_vij_3 ] = 1 else : to_be_deleted . append ( idx_vij_1 ) C2 = np . delete ( C2 , to_be_deleted , axis = 0 ) b = np . zeros ( ( C2 . shape [ 0 ] , 1 ) ) return C2 , b | Get linear constraints on KE matrix . |
47,579 | def _bytes_to_uint_48 ( self , bytes_ ) : return ( ( bytes_ [ 0 ] * pow ( 2 , 40 ) ) + ( bytes_ [ 1 ] * pow ( 2 , 32 ) ) + ( bytes_ [ 2 ] * pow ( 2 , 24 ) ) + ( bytes_ [ 3 ] << 16 ) + ( bytes_ [ 4 ] << 8 ) + bytes_ [ 4 ] ) | Converts an array of 6 bytes to a 48bit integer . |
47,580 | def validate_csv ( file ) : try : with file . open ( ) as fp : encoding = detect_encoding ( fp , default = 'utf-8' ) sample = fp . read ( current_app . config . get ( 'PREVIEWER_CSV_VALIDATION_BYTES' , 1024 ) ) delimiter = csv . Sniffer ( ) . sniff ( sample . decode ( encoding ) ) . delimiter is_valid = True except Exception as e : current_app . logger . debug ( 'File {0} is not valid CSV: {1}' . format ( file . uri , e ) ) encoding = '' delimiter = '' is_valid = False return { 'delimiter' : delimiter , 'encoding' : encoding , 'is_valid' : is_valid } | Return dialect information about given csv file . |
47,581 | def make_tree ( file ) : max_files_count = current_app . config . get ( 'PREVIEWER_ZIP_MAX_FILES' , 1000 ) tree = { 'type' : 'folder' , 'id' : - 1 , 'children' : { } } try : with file . open ( ) as fp : zf = zipfile . ZipFile ( fp ) sample = ' ' . join ( zf . namelist ( ) [ : max_files_count ] ) if not isinstance ( sample , binary_type ) : sample = sample . encode ( 'utf-16be' ) encoding = chardet . detect ( sample ) . get ( 'encoding' , 'utf-8' ) for i , info in enumerate ( zf . infolist ( ) ) : if i > max_files_count : raise BufferError ( 'Too many files inside the ZIP file.' ) comps = info . filename . split ( os . sep ) node = tree for c in comps : if not isinstance ( c , text_type ) : c = c . decode ( encoding ) if c not in node [ 'children' ] : if c == '' : node [ 'type' ] = 'folder' continue node [ 'children' ] [ c ] = { 'name' : c , 'type' : 'item' , 'id' : 'item{0}' . format ( i ) , 'children' : { } } node = node [ 'children' ] [ c ] node [ 'size' ] = info . file_size except BufferError : return tree , True , None except ( zipfile . LargeZipFile ) : return tree , False , 'Zipfile is too large to be previewed.' except Exception as e : current_app . logger . warning ( str ( e ) , exc_info = True ) return tree , False , 'Zipfile is not previewable.' return tree , False , None | Create tree structure from ZIP archive . |
47,582 | def children_to_list ( node ) : if node [ 'type' ] == 'item' and len ( node [ 'children' ] ) == 0 : del node [ 'children' ] else : node [ 'type' ] = 'folder' node [ 'children' ] = list ( node [ 'children' ] . values ( ) ) node [ 'children' ] . sort ( key = lambda x : x [ 'name' ] ) node [ 'children' ] = map ( children_to_list , node [ 'children' ] ) return node | Organize children structure . |
47,583 | def preview ( file ) : tree , limit_reached , error = make_tree ( file ) list = children_to_list ( tree ) [ 'children' ] return render_template ( "invenio_previewer/zip.html" , file = file , tree = list , limit_reached = limit_reached , error = error , js_bundles = current_previewer . js_bundles + [ 'previewer_fullscreen_js' ] , css_bundles = current_previewer . css_bundles , ) | Return appropriate template and pass the file and an embed flag . |
47,584 | def create_mask ( N , method = 'all' , nmissing = 0 ) : weights = np . ones ( ( N , N ) ) weights [ range ( N ) , range ( N ) ] = 0 if method == 'none' : return weights elif method == 'all' : all_indices = np . triu_indices ( N , 1 ) elif method == 'first' : all_indices = [ np . zeros ( N - 1 ) . astype ( np . int ) , np . arange ( 1 , N ) . astype ( np . int ) ] ntotal = len ( all_indices [ 0 ] ) choice = np . random . choice ( ntotal , nmissing , replace = False ) chosen = [ all_indices [ 0 ] [ choice ] , all_indices [ 1 ] [ choice ] ] weights [ chosen ] = 0 weights [ chosen [ 1 ] , chosen [ 0 ] ] = 0 return weights | Create weight mask according to method . |
47,585 | def describe ( ** kwargs ) : if isinstance ( kwargs . get ( "paths" ) , string_type ) : kwargs [ "paths" ] = [ kwargs [ "paths" ] ] if isinstance ( kwargs . get ( "methods" ) , string_type ) : kwargs [ "methods" ] = [ kwargs [ "methods" ] ] attrs = TransmuteAttributes ( ** kwargs ) def decorator ( f ) : if hasattr ( f , "transmute" ) : f . transmute = f . transmute | attrs else : f . transmute = attrs return f return decorator | describe is a decorator to customize the rest API that transmute generates such as choosing certain arguments to be query parameters or body parameters or a different method . |
47,586 | def rmse_2pi ( x , xhat ) : real_diff = from_0_to_pi ( x - xhat ) np . square ( real_diff , out = real_diff ) sum_ = np . sum ( real_diff ) return sqrt ( sum_ / len ( x ) ) | Calcualte rmse between vector or matrix x and xhat ignoring mod of 2pi . |
47,587 | def keys ( self ) : return_value = [ ] for s in self . serializers : return_value += s . content_type return return_value | return a list of the content types this set supports . |
47,588 | def _extract_path_parameters_from_paths ( paths ) : params = set ( ) for path in paths : parts = PART_REGEX . split ( path ) for p in parts : match = PARAM_REGEX . match ( p ) if match : params . add ( match . group ( "name" ) ) return params | from a list of paths return back a list of the arguments present in those paths . |
47,589 | def _build_body_schema ( serializer , body_parameters ) : description = "" if isinstance ( body_parameters , Param ) : schema = serializer . to_json_schema ( body_parameters . arginfo . type ) description = body_parameters . description required = True else : if len ( body_parameters ) == 0 : return None required = set ( ) body_properties = { } for name , param in body_parameters . items ( ) : arginfo = param . arginfo body_properties [ name ] = serializer . to_json_schema ( arginfo . type ) body_properties [ name ] [ "description" ] = param . description if arginfo . default is NoDefault : required . add ( name ) schema = { "type" : "object" , "required" : list ( required ) , "properties" : body_properties , } required = len ( required ) > 0 return BodyParameter ( { "name" : "body" , "description" : description , "required" : required , "schema" : schema , } ) | body is built differently since it s a single argument no matter what . |
47,590 | def procrustes ( anchors , X , scale = True , print_out = False ) : def centralize ( X ) : n = X . shape [ 0 ] ones = np . ones ( ( n , 1 ) ) return X - np . multiply ( 1 / n * np . dot ( ones . T , X ) , ones ) m = anchors . shape [ 0 ] N , d = X . shape assert m >= d , 'Have to give at least d anchor nodes.' X_m = X [ N - m : , : ] ones = np . ones ( ( m , 1 ) ) mux = 1 / m * np . dot ( ones . T , X_m ) muy = 1 / m * np . dot ( ones . T , anchors ) sigmax = 1 / m * np . linalg . norm ( X_m - mux ) ** 2 sigmaxy = 1 / m * np . dot ( ( anchors - muy ) . T , X_m - mux ) try : U , D , VT = np . linalg . svd ( sigmaxy ) except np . LinAlgError : print ( 'strange things are happening...' ) print ( sigmaxy ) print ( np . linalg . matrix_rank ( sigmaxy ) ) if ( scale ) : c = np . trace ( np . diag ( D ) ) / sigmax else : c = np . trace ( np . diag ( D ) ) / sigmax if ( print_out ) : print ( 'Optimal scale would be: {}. Setting it to 1 now.' . format ( c ) ) c = 1.0 R = np . dot ( U , VT ) t = muy . T - c * np . dot ( R , mux . T ) X_transformed = ( c * np . dot ( R , ( X - mux ) . T ) + muy . T ) . T return X_transformed , R , t , c | Fit X to anchors by applying optimal translation rotation and reflection . |
47,591 | def reconstruct_cdm ( dm , absolute_angles , all_points , W = None ) : from pylocus . point_set import dmi_from_V , sdm_from_dmi , get_V from pylocus . mds import signedMDS N = all_points . shape [ 0 ] V = get_V ( absolute_angles , dm ) dmx = dmi_from_V ( V , 0 ) dmy = dmi_from_V ( V , 1 ) sdmx = sdm_from_dmi ( dmx , N ) sdmy = sdm_from_dmi ( dmy , N ) points_x = signedMDS ( sdmx , W ) points_y = signedMDS ( sdmy , W ) Xhat = np . c_ [ points_x , points_y ] Y , R , t , c = procrustes ( all_points , Xhat , scale = False ) return Y | Reconstruct point set from angle and distance measurements using coordinate difference matrices . |
47,592 | def reconstruct_mds ( edm , all_points , completion = 'optspace' , mask = None , method = 'geometric' , print_out = False , n = 1 ) : from . point_set import dm_from_edm from . mds import MDS N = all_points . shape [ 0 ] d = all_points . shape [ 1 ] if mask is not None : edm_missing = np . multiply ( edm , mask ) if completion == 'optspace' : from . edm_completion import optspace edm_complete = optspace ( edm_missing , d + 2 ) elif completion == 'alternate' : from . edm_completion import rank_alternation edm_complete , errs = rank_alternation ( edm_missing , d + 2 , print_out = False , edm_true = edm ) else : raise NameError ( 'Unknown completion method {}' . format ( completion ) ) if ( print_out ) : err = np . linalg . norm ( edm_complete - edm ) ** 2 / np . linalg . norm ( edm ) ** 2 print ( '{}: relative error:{}' . format ( completion , err ) ) edm = edm_complete Xhat = MDS ( edm , d , method , False ) . T Y , R , t , c = procrustes ( all_points [ n : ] , Xhat , True ) return Y | Reconstruct point set using MDS and matrix completion algorithms . |
47,593 | def reconstruct_sdp ( edm , all_points , W = None , print_out = False , lamda = 1000 , ** kwargs ) : from . edm_completion import semidefinite_relaxation edm_complete = semidefinite_relaxation ( edm , lamda = lamda , W = W , print_out = print_out , ** kwargs ) Xhat = reconstruct_mds ( edm_complete , all_points , method = 'geometric' ) return Xhat , edm_complete | Reconstruct point set using semi - definite rank relaxation . |
47,594 | def get_lateration_parameters ( all_points , indices , index , edm , W = None ) : if W is None : W = np . ones ( edm . shape ) anchors = np . delete ( all_points , indices , axis = 0 ) r2 = np . delete ( edm [ index , : ] , indices ) w = np . delete ( W [ index , : ] , indices ) if np . isnan ( r2 ) . any ( ) : nan_measurements = np . where ( np . isnan ( r2 ) ) [ 0 ] r2 [ nan_measurements ] = 0.0 w [ nan_measurements ] = 0.0 if np . isnan ( w ) . any ( ) : nan_measurements = np . where ( np . isnan ( w ) ) [ 0 ] r2 [ nan_measurements ] = 0.0 w [ nan_measurements ] = 0.0 missing_anchors = np . where ( w == 0.0 ) [ 0 ] w = np . asarray ( np . delete ( w , missing_anchors ) ) r2 = np . asarray ( np . delete ( r2 , missing_anchors ) ) w . resize ( edm . shape [ 0 ] - len ( indices ) - len ( missing_anchors ) , 1 ) r2 . resize ( edm . shape [ 0 ] - len ( indices ) - len ( missing_anchors ) , 1 ) anchors = np . delete ( anchors , missing_anchors , axis = 0 ) assert w . shape [ 0 ] == anchors . shape [ 0 ] assert np . isnan ( w ) . any ( ) == False assert np . isnan ( r2 ) . any ( ) == False return anchors , w , r2 | Get parameters relevant for lateration from full all_points edm and W . |
47,595 | def _join_parameters ( base , nxt ) : if nxt is None : return base if isinstance ( base , set ) and isinstance ( nxt , set ) : return base | nxt else : return nxt | join parameters from the lhs to the rhs if compatible . |
47,596 | def get_swagger_operation ( self , context = default_context ) : consumes = produces = context . contenttype_serializers . keys ( ) parameters = get_swagger_parameters ( self . parameters , context ) responses = { "400" : Response ( { "description" : "invalid input received" , "schema" : Schema ( { "title" : "FailureObject" , "type" : "object" , "properties" : { "success" : { "type" : "boolean" } , "result" : { "type" : "string" } , } , "required" : [ "success" , "result" ] , } ) , } ) } for code , details in self . response_types . items ( ) : responses [ str ( code ) ] = details . swagger_definition ( context ) return Operation ( { "summary" : self . summary , "description" : self . description , "consumes" : consumes , "produces" : produces , "parameters" : parameters , "responses" : responses , "operationId" : self . raw_func . __name__ , "tags" : self . tags , } ) | get the swagger_schema operation representation . |
47,597 | def process_result ( self , context , result_body , exc , content_type ) : return process_result ( self , context , result_body , exc , content_type ) | given an result body and an exception object return the appropriate result object or raise an exception . |
47,598 | def _parse_response_types ( argspec , attrs ) : return_type = argspec . annotations . get ( "return" ) or None type_description = attrs . parameter_descriptions . get ( "return" , "" ) response_types = attrs . response_types . copy ( ) if return_type or len ( response_types ) == 0 : response_types [ attrs . success_code ] = ResponseType ( type = return_type , type_description = type_description , description = "success" , ) return response_types | from the given parameters return back the response type dictionaries . |
47,599 | def generate_swagger_html ( swagger_static_root , swagger_json_url ) : tmpl = _get_template ( "swagger.html" ) return tmpl . render ( swagger_root = swagger_static_root , swagger_json_url = swagger_json_url ) | given a root directory for the swagger statics and a swagger json path return back a swagger html designed to use those values . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.