idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
54,600
def get_capabilities_by_type ( self , strict_type_matching : bool = False ) -> Dict [ Type , Dict [ str , Dict [ str , Parser ] ] ] : check_var ( strict_type_matching , var_types = bool , var_name = 'strict_matching' ) res = dict ( ) for typ in self . get_all_supported_types ( ) : res [ typ ] = self . get_capabilities_...
For all types that are supported lists all extensions that can be parsed into such a type . For each extension provides the list of parsers supported . The order is most pertinent first
54,601
def get_capabilities_by_ext ( self , strict_type_matching : bool = False ) -> Dict [ str , Dict [ Type , Dict [ str , Parser ] ] ] : check_var ( strict_type_matching , var_types = bool , var_name = 'strict_matching' ) res = dict ( ) for ext in self . get_all_supported_exts_for_type ( type_to_match = JOKER , strict = st...
For all extensions that are supported lists all types that can be parsed from this extension . For each type provide the list of parsers supported . The order is most pertinent first
54,602
def get_capabilities_for_ext ( self , ext , strict_type_matching : bool = False ) -> Dict [ Type , Dict [ str , Parser ] ] : r = dict ( ) for typ in self . get_all_supported_types_for_ext ( ext ) : matching = self . find_all_matching_parsers ( strict_type_matching , desired_type = typ , required_ext = ext ) [ 0 ] r [ t...
Utility method to return for a given file extension all known ways to parse a file with this extension organized by target object type .
54,603
def get_all_supported_types_for_ext ( self , ext_to_match : str , strict_type_matching : bool = False ) -> Set [ Type ] : matching = self . find_all_matching_parsers ( required_ext = ext_to_match , strict = strict_type_matching ) [ 0 ] return { typ for types in [ p . supported_types for p in ( matching [ 0 ] + matching...
Utility method to return the set of all supported types that may be parsed from files with the given extension . ext = JOKER is a joker that means all extensions
54,604
def get_all_supported_exts_for_type ( self , type_to_match : Type [ Any ] , strict : bool ) -> Set [ str ] : matching = self . find_all_matching_parsers ( desired_type = type_to_match , strict = strict ) [ 0 ] return { ext for exts in [ p . supported_exts for p in ( matching [ 0 ] + matching [ 1 ] + matching [ 2 ] ) ] ...
Utility method to return the set of all supported file extensions that may be converted to objects of the given type . type = JOKER is a joker that means all types
54,605
def find_all_matching_parsers ( self , strict : bool , desired_type : Type [ Any ] = JOKER , required_ext : str = JOKER ) -> Tuple [ Tuple [ List [ Parser ] , List [ Parser ] , List [ Parser ] ] , List [ Parser ] , List [ Parser ] , List [ Parser ] ] : check_var ( strict , var_types = bool , var_name = 'strict' ) desir...
Implementation of the parent method by looking into the registry to find the most appropriate parsers to use in order
54,606
def _build_parser_for_fileobject_and_desiredtype ( self , obj_on_filesystem : PersistedObject , object_typ : Type [ T ] , logger : Logger = None ) -> Dict [ Type , Parser ] : parsers = OrderedDict ( ) errors = OrderedDict ( ) try : p = self . __build_parser_for_fileobject_and_desiredtype ( obj_on_filesystem , object_ty...
Builds a parser for each subtype of object_typ
54,607
def get_all_conversion_chains_to_type(self, to_type: Type[Any]) -> Tuple[List[Converter], List[Converter], List[Converter]]:
    """Find every converter or conversion chain ending at ``to_type``.

    Thin convenience wrapper: delegates to :meth:`get_all_conversion_chains`
    with only the destination side of the query specified.
    """
    return self.get_all_conversion_chains(to_type=to_type)
Utility method to find all converters to a given type
54,608
def get_all_conversion_chains_from_type(self, from_type: Type[Any]) -> Tuple[List[Converter], List[Converter], List[Converter]]:
    """Find every converter or conversion chain starting from ``from_type``.

    Thin convenience wrapper: delegates to :meth:`get_all_conversion_chains`
    with only the source side of the query specified.
    """
    return self.get_all_conversion_chains(from_type=from_type)
Utility method to find all converters from a given type .
54,609
def get_all_conversion_chains(self, from_type: Type[Any] = JOKER, to_type: Type[Any] = JOKER) -> Tuple[List[Converter], List[Converter], List[Converter]]:
    """Find all converters or conversion chains matching the provided query.

    Either side of the query may be left as JOKER, meaning "match anything".
    This declaration is a stub; the concrete behaviour is supplied elsewhere.
    """
    pass
Utility method to find all converters or conversion chains matching the provided query .
54,610
def get_all_conversion_chains ( self , from_type : Type [ Any ] = JOKER , to_type : Type [ Any ] = JOKER ) -> Tuple [ List [ Converter ] , List [ Converter ] , List [ Converter ] ] : if from_type is JOKER and to_type is JOKER : matching_dest_generic = self . _generic_nonstrict_conversion_chains . copy ( ) + self . _gen...
Utility method to find matching converters or conversion chains .
54,611
def find_all_matching_parsers ( self , strict : bool , desired_type : Type [ Any ] = JOKER , required_ext : str = JOKER ) -> Tuple [ Tuple [ List [ Parser ] , List [ Parser ] , List [ Parser ] ] , List [ Parser ] , List [ Parser ] , List [ Parser ] ] : desired_type = get_validated_type ( desired_type , 'desired_type' ,...
Overrides the parent method to find parsers appropriate to a given extension and type . This leverages both the parser registry and the converter registry to propose parsing chains in a relevant order
54,612
def _complete_parsers_with_converters ( self , parser , parser_supported_type , desired_type , matching_c_generic_to_type , matching_c_approx_to_type , matching_c_exact_to_type ) : matching_p_generic , matching_p_generic_with_approx_chain , matching_p_approx , matching_p_approx_with_approx_chain , matching_p_exact , ma...
Internal method to create parsing chains made of a parser and converters from the provided lists . Once again a JOKER for a type means joker here .
54,613
def get_changed_files ( include_staged = False ) : process = subprocess . Popen ( [ 'git' , 'status' , '--porcelain' ] , stdout = subprocess . PIPE , stderr = subprocess . STDOUT ) stdout , __ = process . communicate ( ) if process . returncode != 0 : raise ValueError ( stdout ) files = [ ] for line in stdout . decode ...
Returns a list of the files that changed in the Git repository . This is used to check if the files that are supposed to be upgraded have changed . If so the upgrade will be prevented .
54,614
def _parse_doc ( docs ) : name = "(?:[a-zA-Z][a-zA-Z0-9-_]*)" re_var = re . compile ( r"^ *(%s)(?: */(%s))? *:(.*)$" % ( name , name ) ) re_opt = re . compile ( r"^ *(?:(-[a-zA-Z0-9]),? +)?--(%s)(?: *=(%s))? *:(.*)$" % ( name , name ) ) shorts , metavars , helps , description , epilog = { } , { } , { } , "" , "" if doc...
Converts a well - formed docstring into documentation to be fed into argparse .
54,615
def quote ( text ) : 'Handle quote characters' if not isinstance ( text , unicode ) : text = text . decode ( 'utf-8' ) for qp in QUOTEPAIRS : if text [ 0 ] == qp [ 0 ] and text [ - 1 ] == qp [ - 1 ] and len ( text ) >= 2 : return text for qp in QUOTEPAIRS : if qp [ 1 ] not in text : return qp [ 0 ] + text + qp [ 1 ] ra...
Handle quote characters
54,616
def reltags ( self , src , cache = None ) : if not self . _tag_assocs : return set ( ) if cache == None : cache = { } q = _otq ( ) q . append ( src ) updateq = _otq ( ) while q : i = q . popleft ( ) if i in cache : continue cache [ i ] = set ( ) for ( s , t ) in self . transitions_to ( i ) : q . append ( s ) if self . ...
returns all the tags that are relevant at this state cache should be a dictionary and it is updated by the function
54,617
def make_log_metric(level=logging.INFO, msg="%d items in %.2f seconds"):
    """Build a metric callback that reports through the ``logging`` module.

    :param level: log level used for every emitted record.
    :param msg: %-style format receiving ``(count, elapsed)``.
    :return: a callable ``(name, count, elapsed)`` that logs to the logger
        ``instrument.<name>`` (or plain ``instrument`` when *name* is falsy).
    """
    def log_metric(name, count, elapsed):
        # Only append the suffix for a truthy name, mirroring the logger tree.
        suffix = '.{}'.format(name) if name else ''
        logging.getLogger('instrument' + suffix).log(level, msg, count, elapsed)
    return log_metric
Make a new metric function that logs at the given level
54,618
def DBObject ( table_name , versioning = VersioningTypes . NONE ) : def wrapped ( cls ) : field_names = set ( ) all_fields = [ ] for name in dir ( cls ) : fld = getattr ( cls , name ) if fld and isinstance ( fld , Field ) : fld . name = name all_fields . append ( fld ) field_names . add ( name ) def add_missing_field (...
Classes annotated with DBObject gain persistence methods .
54,619
def train ( self , data , target , ** kwargs ) : non_predictors = [ i . replace ( " " , "_" ) . lower ( ) for i in list ( set ( data [ 'team' ] ) ) ] + [ "team" , "next_year_wins" ] self . column_names = [ l for l in list ( data . columns ) if l not in non_predictors ] results , folds = self . cross_validate ( data , n...
Used in the training phase . Override .
54,620
def as_command ( self ) : try : params = self . unbound_func . __click_params__ params . reverse ( ) del self . unbound_func . __click_params__ except AttributeError : params = [ ] help = inspect . getdoc ( self . real_func ) if isinstance ( help , bytes ) : help = help . decode ( 'utf-8' ) self . options . setdefault ...
Creates the click command wrapping the function
54,621
def main ( command_line_arguments = None ) : args = command_line_options ( command_line_arguments ) cmap = mpl . cm . get_cmap ( name = 'hsv' ) count = len ( args . files ) + ( len ( args . baselines ) if args . baselines else 0 ) colors = [ cmap ( i ) for i in numpy . linspace ( 0 , 1.0 , count + 1 ) ] logger . info (...
Reads score files computes error measures and plots curves .
54,622
def get_resources ( minify = False ) : all_resources = dict ( ) subclasses = resource_base . ResourceBase . __subclasses__ ( ) + resource_definitions . ResourceAngular . __subclasses__ ( ) for resource in subclasses : obj = resource ( minify ) all_resources [ resource . RESOURCE_NAME ] = dict ( css = tuple ( obj . reso...
Find all resources which subclass ResourceBase .
54,623
def search ( self , dsl , params ) : query_parameters = [ ] for key , value in params : query_parameters . append ( self . CITEDBY_THRIFT . kwargs ( str ( key ) , str ( value ) ) ) try : result = self . client . search ( dsl , query_parameters ) except self . CITEDBY_THRIFT . ServerError : raise ServerError ( 'you may ...
Free queries to ES index .
54,624
def raise_error(error):
    """Re-raise a remote error description as a local exception.

    :param error: dict of remote error information; the optional
        ``exc_type`` key selects the exception class via ERROR_TYPE_MAP
        (after stripping ERROR_PREFIX), defaulting to DashiError.
    :raises: the mapped exception class, constructed from *error*.
    """
    remote_type = error.get('exc_type')
    exc_cls = DashiError
    if remote_type and remote_type.startswith(ERROR_PREFIX):
        stripped = remote_type[len(ERROR_PREFIX):]
        exc_cls = ERROR_TYPE_MAP.get(stripped, DashiError)
    raise exc_cls(**error)
Intakes a dict of remote error information and raises a DashiError
54,625
def fire ( self , name , operation , args = None , ** kwargs ) : if args : if kwargs : raise TypeError ( "specify args dict or keyword arguments, not both" ) else : args = kwargs d = dict ( op = operation , args = args ) headers = { 'sender' : self . add_sysname ( self . name ) } dest = self . add_sysname ( name ) def ...
Send a message without waiting for a reply
54,626
def call ( self , name , operation , timeout = 10 , args = None , ** kwargs ) : if args : if kwargs : raise TypeError ( "specify args dict or keyword arguments, not both" ) else : args = kwargs msg_id = uuid . uuid4 ( ) . hex queue_arguments = { 'x-expires' : int ( ( timeout + 1 ) * 1000 ) } queue = Queue ( name = msg_...
Send a message and wait for reply
54,627
def handle ( self , operation , operation_name = None , sender_kwarg = None ) : if not self . _consumer : self . _consumer = DashiConsumer ( self , self . _conn , self . _name , self . _exchange , sysname = self . _sysname ) self . _consumer . add_op ( operation_name or operation . __name__ , operation , sender_kwarg =...
Handle an operation using the specified function
54,628
def link_exceptions(self, custom_exception=None, dashi_exception=None):
    """Link a custom exception raised on the receiver to a dashi exception.

    Both arguments are mandatory; ``None`` raises ValueError.
    """
    for value, label in ((custom_exception, "custom_exception"),
                         (dashi_exception, "dashi_exception")):
        if value is None:
            raise ValueError("%s must be set" % label)
    self._linked_exceptions[custom_exception] = dashi_exception
Link a custom exception thrown on the receiver to a dashi exception
54,629
def ensure ( self , connection , func , * args , ** kwargs ) : channel = None while 1 : try : if channel is None : channel = connection . channel ( ) return func ( channel , * args , ** kwargs ) , channel except ( connection . connection_errors , IOError ) : self . _call_errback ( ) channel = self . connect ( connectio...
Perform an operation until success
54,630
def re_tab(s):
    """Return a tabbed string from an expanded one.

    Runs of trailing spaces that end exactly on an 8-column boundary are
    collapsed into a single tab; the string is returned unchanged when no
    boundary qualifies.
    """
    chunks = []
    start = 0
    for stop in range(8, len(s), 8):
        # A boundary qualifies when the two characters before it are spaces.
        if s[stop - 2:stop] == "  ":
            chunks.append(s[start:stop].rstrip() + "\t")
            start = stop
    if not chunks:
        return s
    chunks.append(s[start:])
    return "".join(chunks)
Return a tabbed string from an expanded one .
54,631
def read_next_line ( self ) : next_line = self . file . readline ( ) if not next_line or next_line [ - 1 : ] != '\n' : self . file = None else : next_line = next_line [ : - 1 ] expanded = next_line . expandtabs ( ) edit = urwid . Edit ( "" , expanded , allow_tab = True ) edit . set_edit_pos ( 0 ) edit . original_text =...
Read another line from the file .
54,632
def _get_at_pos ( self , pos ) : if pos < 0 : return None , None if len ( self . lines ) > pos : return self . lines [ pos ] , pos if self . file is None : return None , None assert pos == len ( self . lines ) , "out of order request?" self . read_next_line ( ) return self . lines [ - 1 ] , pos
Return a widget for the line number passed .
54,633
def split_focus(self):
    """Divide the focus edit widget in two at the cursor location."""
    widget = self.lines[self.focus]
    cut = widget.edit_pos
    # New widget receives everything after the cursor.
    tail = urwid.Edit("", widget.edit_text[cut:], allow_tab=True)
    tail.original_text = ""
    # Existing widget keeps everything before the cursor.
    widget.set_edit_text(widget.edit_text[:cut])
    tail.set_edit_pos(0)
    self.lines.insert(self.focus + 1, tail)
Divide the focus edit widget at the cursor location .
54,634
def combine_focus_with_prev(self):
    """Merge the focus edit widget into the one above it.

    No-op when there is no previous line. The cursor ends up at the join
    point and focus moves up one line.
    """
    above, _ = self.get_prev(self.focus)
    if above is None:
        return
    current = self.lines[self.focus]
    above.set_edit_pos(len(above.edit_text))
    above.set_edit_text(above.edit_text + current.edit_text)
    del self.lines[self.focus]
    self.focus -= 1
Combine the focus edit widget with the one above .
54,635
def combine_focus_with_next(self):
    """Merge the edit widget below into the focus widget.

    No-op when there is no next line; focus position is unchanged.
    """
    below, _ = self.get_next(self.focus)
    if below is None:
        return
    current = self.lines[self.focus]
    current.set_edit_text(current.edit_text + below.edit_text)
    del self.lines[self.focus + 1]
Combine the focus edit widget with the one below .
54,636
def handle_keypress ( self , k ) : if k == "esc" : self . save_file ( ) raise urwid . ExitMainLoop ( ) elif k == "delete" : self . walker . combine_focus_with_next ( ) elif k == "backspace" : self . walker . combine_focus_with_prev ( ) elif k == "enter" : self . walker . split_focus ( ) self . view . keypress ( size , ...
Last resort for keypresses .
54,637
def save_file ( self ) : l = [ ] walk = self . walker for edit in walk . lines : if edit . original_text . expandtabs ( ) == edit . edit_text : l . append ( edit . original_text ) else : l . append ( re_tab ( edit . edit_text ) ) while walk . file is not None : l . append ( walk . read_next_line ( ) ) outfile = open ( ...
Write the file out to disk .
54,638
def _media ( self ) : css = [ 'markymark/css/markdown-editor.css' ] iconlibrary_css = getattr ( settings , 'MARKYMARK_FONTAWESOME_CSS' , 'markymark/fontawesome/fontawesome.min.css' ) if iconlibrary_css : css . append ( iconlibrary_css ) media = forms . Media ( css = { 'all' : css } , js = ( 'markymark/js/markdown-edito...
Returns a forms . Media instance with the basic editor media and media from all registered extensions .
54,639
def getsuffix(subject):
    """Return the suffix (extension) of a filename, without the dot.

    Returns None when the name has no suffix; returns '' when the name
    ends with a period. Both '/' and '\\' are treated as path separators.
    """
    dot = subject.rfind('.')
    # Normalise separators so the comparison works for Windows paths too.
    last_sep = subject.replace('\\', '/').rfind('/')
    if dot > last_sep:
        return subject[dot + 1:]
    return None
Returns the suffix of a filename . If the file has no suffix returns None . Can return an empty string if the filename ends with a period .
54,640
def init_app ( self , app ) : app . config . setdefault ( 'STATICS_MINIFY' , False ) self . all_resources = ALL_RESOURCES_MINIFIED if app . config . get ( 'STATICS_MINIFY' ) else ALL_RESOURCES self . all_variables = ALL_VARIABLES if not hasattr ( app , 'extensions' ) : app . extensions = dict ( ) if 'statics' in app . ...
Initialize the extension .
54,641
def measure_board_rms ( control_board , n_samples = 10 , sampling_ms = 10 , delay_between_samples_ms = 0 ) : try : results = control_board . measure_impedance ( n_samples , sampling_ms , delay_between_samples_ms , True , True , [ ] ) except RuntimeError : logger . warning ( 'Error encountered during high-voltage RMS ' ...
Read RMS voltage samples from control board high - voltage feedback circuit .
54,642
def find_good ( control_board , actuation_steps , resistor_index , start_index , end_index ) : lower = start_index upper = end_index while lower < upper - 1 : index = lower + ( upper - lower ) / 2 v = actuation_steps [ index ] control_board . set_waveform_voltage ( v ) data = measure_board_rms ( control_board ) valid_d...
Use a binary search over the range of provided actuation_steps to find the maximum actuation voltage that is measured by the board feedback circuit using the specified feedback resistor .
54,643
def resistor_max_actuation_readings ( control_board , frequencies , oscope_reading_func ) : control_board . set_waveform_voltage ( 0 ) control_board . auto_adjust_amplifier_gain = False control_board . amplifier_gain = 1. target_voltage = 0.1 control_board . set_waveform_voltage ( target_voltage ) oscope_rms = oscope_r...
For each resistor in the high - voltage feedback resistor bank read the board measured voltage and the oscilloscope measured voltage for an actuation voltage that nearly saturates the feedback resistor .
54,644
def fit_feedback_params ( calibration , max_resistor_readings ) : R1 = 10e6 def fit_resistor_params ( x ) : resistor_index = x [ 'resistor index' ] . values [ 0 ] p0 = [ calibration . R_hv [ resistor_index ] , calibration . C_hv [ resistor_index ] ] def error ( p , df , R1 ) : v1 = compute_from_transfer_function ( cali...
Fit model of control board high - voltage feedback resistor and parasitic capacitance values based on measured voltage readings .
54,645
def update_control_board_calibration(control_board, fitted_params):
    """Write fitted series resistance/capacitance values to the board.

    ``fitted_params`` is indexed by the column labels 'fitted R' and
    'fitted C', each exposing a ``.values`` sequence (DataFrame-style —
    TODO confirm against caller).
    """
    for attr, column in (('a0_series_resistance', 'fitted R'),
                         ('a0_series_capacitance', 'fitted C')):
        setattr(control_board, attr, fitted_params[column].values)
Update the control board with the specified fitted parameters .
54,646
def load ( self ) : data = self . dict_class ( ) for path in self . paths : if path in self . paths_loaded : continue try : with open ( path , 'r' ) as file : path_data = yaml . load ( file . read ( ) ) data = dict_merge ( data , path_data ) self . paths_loaded . add ( path ) except IOError : if not path . endswith ( '...
Load each path in order . Remember paths already loaded and only load new ones .
54,647
def _initialize ( self , settings_module ) : self . settings_list = [ ] for setting in dir ( global_settings ) : if setting == setting . upper ( ) : setattr ( self , setting , getattr ( global_settings , setting ) ) self . settings_list . append ( setting ) if settings_module is not None : self . SETTINGS_MODULE = sett...
Initialize the settings from a given settings_module settings_module - path to settings module
54,648
def _setup ( self ) : settings_module = None try : settings_module = os . environ [ global_settings . MODULE_VARIABLE ] except KeyError : error_message = "Settings not properly configured. Cannot find the environment variable {0}" . format ( global_settings . MODULE_VARIABLE ) log . exception ( error_message ) self . ...
Perform initial setup of the settings class such as getting the settings module and setting the settings
54,649
def _configure_logging ( self ) : if not self . LOGGING_CONFIG : dictConfig ( self . DEFAULT_LOGGING ) else : dictConfig ( self . LOGGING_CONFIG )
Setting up logging from logging config in settings
54,650
def ensure_context(**vars):
    """Yield the current context, creating and pushing one if needed.

    When no context is on the stack a fresh Context is pushed for the
    duration of the ``with`` block and popped on exit; an existing
    context is reused and left on the stack. *vars* are merged into the
    context either way.
    """
    ctx = _context_stack.top
    pushed = not ctx
    if pushed:
        ctx = Context()
        _context_stack.push(ctx)
    ctx.update(vars)
    try:
        yield ctx
    finally:
        # Only pop what we pushed ourselves.
        if pushed:
            _context_stack.pop()
Ensures that a context is in the stack creates one otherwise .
54,651
def request_context ( app , request ) : vars = { } if request . view_args is not None : vars . update ( request . view_args ) vars . update ( { "request" : request , "GET" : AttrDict ( request . args . to_dict ( ) ) , "POST" : AttrDict ( request . form . to_dict ( ) ) , "app" : app , "config" : app . config , "session"...
Creates a Context instance from the given request object
54,652
def clone(self, **override_vars):
    """Return a copy of this context, with *override_vars* applied on top.

    The executed-actions set is copied (not shared) so the clone tracks
    its own history.
    """
    duplicate = Context(self.vars, self.data)
    duplicate.executed_actions = set(self.executed_actions)
    duplicate.vars.update(override_vars)
    return duplicate
Creates a copy of this context
54,653
def mpl_get_cb_bound_below_plot ( ax ) : position = ax . get_position ( ) figW , figH = ax . get_figure ( ) . get_size_inches ( ) fig_aspect = figH / figW box_aspect = ax . get_data_ratio ( ) pb = position . frozen ( ) pb1 = pb . shrunk_to_aspect ( box_aspect , pb , fig_aspect ) . bounds ax_size = ax . get_position ( )...
Return the coordinates for a colorbar axes below the provided axes object . Take into account the changes of the axes due to aspect ratio settings .
54,654
def main():
    # Generate an XLS with specified content via the DocRaptor API and
    # write it to test_basic.xls.
    # NOTE(review): `table` is assigned the DocRaptor instance itself; the
    # original document-content value appears to have been lost — confirm
    # what "document_content" should actually carry before relying on this.
    table = docraptor = DocRaptor()
    print("Create test_basic.xls")
    # "test": True requests a test-mode (non-billed) document.
    with open("test_basic.xls", "wb") as pdf_file:
        pdf_file.write(
            docraptor.create(
                {"document_content": table, "document_type": "xls", "test": True}
            ).content
        )
Generate an XLS with specified content .
54,655
def restore_gc_state():
    """Restore the garbage collector state on leaving the with block.

    Snapshots the enabled flag and debug flags before yielding and
    reinstates both afterwards, regardless of what the block changed.
    """
    was_enabled = gc.isenabled()
    saved_flags = gc.get_debug()
    try:
        yield
    finally:
        gc.set_debug(saved_flags)
        if was_enabled:
            gc.enable()
        else:
            gc.disable()
Restore the garbage collector state on leaving the with block .
54,656
def add_view_file_mapping ( self , pattern , cls ) : if isinstance ( pattern , str ) : if not pattern . endswith ( "*" ) : _ , ext = os . path . splitext ( pattern ) self . allowed_extensions . add ( ext ) pattern = re . compile ( "^" + re . escape ( pattern ) . replace ( "\\*" , ".+" ) + "$" , re . I ) self . view_cla...
Adds a mapping between a file and a view class . Pattern can be an extension in the form . EXT or a filename .
54,657
def get_file_view_cls(self, filename):
    """Return the view class associated with *filename*.

    Falls back to the default view class when *filename* is None or no
    registered pattern matches.
    """
    if filename is not None:
        for pattern, view_cls in self.view_class_files_map:
            if pattern.match(filename):
                return view_cls
    return self.default_view_class
Returns the view class associated to a filename
54,658
def children(self, vertex):
    """Return the list of immediate children of the given vertex."""
    result = []
    for edge in self.out_edges(vertex):
        result.append(self.head(edge))
    return result
Return the list of immediate children of the given vertex .
54,659
def parents(self, vertex):
    """Return the list of immediate parents of this vertex."""
    result = []
    for edge in self.in_edges(vertex):
        result.append(self.tail(edge))
    return result
Return the list of immediate parents of this vertex .
54,660
def descendants ( self , start , generations = None ) : visited = self . vertex_set ( ) visited . add ( start ) to_visit = deque ( [ ( start , 0 ) ] ) while to_visit : vertex , depth = to_visit . popleft ( ) if depth == generations : continue for child in self . children ( vertex ) : if child not in visited : visited ....
Return the subgraph of all nodes reachable from the given start vertex including that vertex .
54,661
def ancestors ( self , start , generations = None ) : visited = self . vertex_set ( ) visited . add ( start ) to_visit = deque ( [ ( start , 0 ) ] ) while to_visit : vertex , depth = to_visit . popleft ( ) if depth == generations : continue for parent in self . parents ( vertex ) : if parent not in visited : visited . ...
Return the subgraph of all nodes from which the given vertex is reachable including that vertex .
54,662
def _component_graph ( self ) : sccs = [ ] stack = [ ] boundaries = [ ] identified = self . vertex_set ( ) index = self . vertex_dict ( ) to_do = [ ] def visit_vertex ( v ) : index [ v ] = len ( stack ) stack . append ( ( 'VERTEX' , v ) ) boundaries . append ( index [ v ] ) to_do . append ( ( leave_vertex , v ) ) to_do...
Compute the graph of strongly connected components .
54,663
def source_components ( self ) : raw_sccs = self . _component_graph ( ) vertex_to_root = self . vertex_dict ( ) non_sources = self . vertex_set ( ) for scc in raw_sccs : root = scc [ 0 ] [ 1 ] for item_type , w in scc : if item_type == 'VERTEX' : vertex_to_root [ w ] = root elif item_type == 'EDGE' : non_sources . add ...
Return the strongly connected components not reachable from any other component . Any component in the graph is reachable from one of these .
54,664
def strongly_connected_components(self):
    """Return the list of strongly connected components of this graph.

    Each component is materialised as a full subgraph over the vertices
    reported by :meth:`_component_graph`.
    """
    components = []
    for raw_scc in self._component_graph():
        vertices = [item for kind, item in raw_scc if kind == 'VERTEX']
        components.append(self.full_subgraph(vertices))
    return components
Return list of strongly connected components of this graph .
54,665
def signature ( self ) : hashable_fields = [ 'MERCHANT' , 'ORDER_REF' , 'ORDER_DATE' , 'ORDER_SHIPPING' , 'PRICES_CURRENCY' , 'DISCOUNT' , 'DESTINATION_CITY' , 'DESTINATION_STATE' , 'DESTINATION_COUNTRY' , 'PAY_METHOD' , 'SELECTED_INSTALLMENTS_NO' , 'TESTORDER' ] result = text_type ( ) suffix = text_type ( ) for field ...
Compute the ORDER_HASH of the request .
54,666
def _prepare_orders(self, orders):
    """Fill missing PayU detail fields on every order.

    A detail key is only added when no order carries it already; the
    value comes from PAYU_ORDER_DETAILS_DEFAULTS, or None when absent.
    """
    for detail in PAYU_ORDER_DETAILS:
        if not any(detail in order for order in orders):
            default = PAYU_ORDER_DETAILS_DEFAULTS.get(detail, None)
            for order in orders:
                order[detail] = default
    return orders
Each order needs to have all its details filled with default value or None in case those are not already filled .
54,667
def staticfiles_url_fetcher ( url ) : if url . startswith ( '/' ) : base_url = staticfiles_storage . base_url filename = url . replace ( base_url , '' , 1 ) path = finders . find ( filename ) if path : with open ( path , 'rb' ) as f : data = f . read ( ) else : f = staticfiles_storage . open ( filename ) data = f . rea...
Returns the file matching url .
54,668
def render_pdf(template, file_, url_fetcher=staticfiles_url_fetcher, context=None):
    """Render *template* with *context* and write the PDF into *file_*.

    *file_* may be any writable target, including a Django response
    object. The dummy base_url defers all resource fetching to
    *url_fetcher*.
    """
    rendered_html = get_template(template).render(context or {})
    document = HTML(string=rendered_html,
                    base_url='not-used://',
                    url_fetcher=url_fetcher)
    document.write_pdf(target=file_)
Writes the PDF data into file_ . Note that file_ can actually be a Django Response object as well .
54,669
def encode_bytes ( src_buf , dst_file ) : if not isinstance ( src_buf , bytes ) : raise TypeError ( 'src_buf must by bytes.' ) len_src_buf = len ( src_buf ) assert 0 <= len_src_buf <= 2 ** 16 - 1 num_written_bytes = len_src_buf + 2 len_buf = FIELD_U16 . pack ( len_src_buf ) dst_file . write ( len_buf ) dst_file . write...
Encode a buffer length followed by the bytes of the buffer itself .
54,670
def decode_bytes ( f ) : buf = f . read ( FIELD_U16 . size ) if len ( buf ) < FIELD_U16 . size : raise UnderflowDecodeError ( ) ( num_bytes , ) = FIELD_U16 . unpack_from ( buf ) num_bytes_consumed = FIELD_U16 . size + num_bytes buf = f . read ( num_bytes ) if len ( buf ) < num_bytes : raise UnderflowDecodeError ( ) ret...
Decode a buffer length from a 2 - byte unsigned int then read the subsequent bytes .
54,671
def encode_utf8 ( s , f ) : encode = codecs . getencoder ( 'utf8' ) encoded_str_bytes , num_encoded_chars = encode ( s ) num_encoded_str_bytes = len ( encoded_str_bytes ) assert 0 <= num_encoded_str_bytes <= 2 ** 16 - 1 num_encoded_bytes = num_encoded_str_bytes + 2 f . write ( FIELD_U8 . pack ( ( num_encoded_str_bytes ...
UTF - 8 encodes string s to file - like object f according to the MQTT Version 3 . 1 . 1 specification in section 1 . 5 . 3 .
54,672
def encode_varint(v, f):
    """Encode non-negative integer *v* to file *f* as a variable-length int.

    Seven data bits per byte, continuation flag in the high bit, least
    significant group first. Returns the number of bytes written.
    """
    assert v >= 0
    written = 0
    while True:
        v, digit = divmod(v, 0x80)
        if v:
            # More groups follow: set the continuation bit.
            digit |= 0x80
        f.write(FIELD_U8.pack(digit))
        written += 1
        if not v:
            return written
Encode integer v to file f .
54,673
def unpack(self, struct):
    """Read exactly ``struct.size`` bytes, unpack them, and return the tuple.

    :param struct: a pre-compiled ``struct.Struct`` instance.
    """
    data = self.read(struct.size)
    return struct.unpack(data)
Read as many bytes as are required to extract struct then unpack and return a tuple of the values .
54,674
def unpack_bytes(self):
    """Unpack a 16-bit-length-prefixed byte string from the stream.

    Delegates to ``decode_bytes`` and tracks the running byte count.
    Returns ``(num_bytes_consumed, payload)``.
    """
    consumed, payload = decode_bytes(self.__f)
    self.__num_bytes_consumed += consumed
    return consumed, payload
Unpack a utf - 8 string encoded as described in MQTT Version 3 . 1 . 1 section 1 . 5 . 3 line 177 . This is a 16 - bit unsigned length followed by a utf - 8 encoded string .
54,675
def read(self, num_bytes):
    """Read exactly *num_bytes* from the underlying file and return them.

    :raises UnderflowDecodeError: when the stream yields fewer bytes.
    """
    chunk = self.__f.read(num_bytes)
    assert len(chunk) <= num_bytes
    if len(chunk) < num_bytes:
        raise UnderflowDecodeError()
    self.__num_bytes_consumed += num_bytes
    return chunk
Read num_bytes and return them .
54,676
def timeout(self, value):
    """Set a timeout on the search query; returns self for chaining."""
    if self.params:
        self.params['timeout'] = value
    else:
        self.params = dict(timeout=value)
    return self
Specifies a timeout on the search query
54,677
def filtered(self, efilter):
    """Apply a filter to the search; returns self for chaining.

    Creates the params dict / filter entry on first use, and merges
    *efilter* into an existing filter via ``dict.update``.
    """
    if not self.params:
        self.params = {'filter': efilter}
        return self
    # FIX: dict.has_key() was removed in Python 3; the `in` operator is
    # the equivalent membership test (and preferred on Python 2 as well).
    if 'filter' not in self.params:
        self.params['filter'] = efilter
        return self
    self.params['filter'].update(efilter)
    return self
Applies a filter to the search
54,678
def size(self, value):
    """Set the number of hits to return (server default is 10); chainable."""
    if self.params:
        self.params['size'] = value
    else:
        self.params = dict(size=value)
    return self
The number of hits to return . Defaults to 10
54,679
def from_offset(self, value):
    """Set the starting 'from' index of the hits to return (default 0); chainable."""
    if self.params:
        self.params['from'] = value
    else:
        self.params = {'from': value}
    return self
The starting from index of the hits to return . Defaults to 0 .
54,680
def sorted(self, fsort):
    """Set the sort specification for the query; returns self for chaining.

    Unlike the other builder methods, this always overwrites any existing
    'sort' entry.
    """
    if not self.params:
        self.params = {}
    self.params['sort'] = fsort
    return self
Allows to add one or more sort on specific fields . Each sort can be reversed as well . The sort is defined on a per field level with special field name for _score to sort by score .
54,681
def doc_create(self, index, itype, value):
    """Create (index) a document of type *itype* in *index*.

    POSTs *value* to the type endpoint and returns the raw response.
    """
    request = self.session
    url = 'http://%s:%s/%s/%s/' % (self.host, self.port, index, itype)
    if self.verbose:
        # FIX: the `print value` statement is Python-2-only syntax; the
        # call form prints identically on Python 2 and is valid on Python 3.
        print(value)
    response = request.post(url, value)
    return response
Creates a document
54,682
def search_index_simple(self, index, key, search_term):
    """Search *index* with a simple ``field:term`` query string."""
    url = 'http://%s:%s/%s/_search?q=%s:%s' % (
        self.host, self.port, index, key, search_term)
    return self.session.get(url)
Search the index using a simple key and search_term
54,683
def search_index_advanced ( self , index , query ) : request = self . session url = 'http://%s:%s/%s/_search' % ( self . host , self . port , index ) if self . params : content = dict ( query = query , ** self . params ) else : content = dict ( query = query ) if self . verbose : print content response = request . post...
Advanced search query against an entire index
54,684
def map ( self , index_name , index_type , map_value ) : request = self . session url = 'http://%s:%s/%s/%s/_mapping' % ( self . host , self . port , index_name , index_type ) content = { index_type : { 'properties' : map_value } } if self . verbose : print content response = request . put ( url , content ) return resp...
Enable a specific map for an index and type
54,685
def list_types(index_name, host='localhost', port='9200'):
    """Module-level convenience: list the types available in *index_name*."""
    client = ElasticSearch(host=host, port=port)
    return client.type_list(index_name)
Lists the context types available in an index
54,686
def type_list(self, index_name):
    """List the types available in *index_name*.

    On success returns the mapping's type names; otherwise returns the
    raw response for the caller to inspect.
    """
    request = self.session
    url = 'http://%s:%s/%s/_mapping' % (self.host, self.port, index_name)
    response = request.get(url)
    # NOTE(review): the status is read from the session object, not the
    # response — confirm the session really exposes `status_code` here.
    if request.status_code == 200:
        return response[index_name].keys()
    return response
List the types available in an index
54,687
def raw ( self , module , method = 'GET' , data = None ) : request = self . session url = 'http://%s:%s/%s' % ( self . host , self . port , module ) if self . verbose : print data if method == 'GET' : response = request . get ( url ) elif method == 'POST' : response = request . post ( url , data ) elif method == 'PUT' ...
Submits or requests raw input against an arbitrary module path using the given HTTP method.
54,688
def inverse(self, N):
    """Return the modular inverse of *N* modulo the field prime ``self.P``.

    Uses the iterative extended Euclidean algorithm; by convention the
    inverse of 0 is 0.
    """
    if N == 0:
        return 0
    lm, hm = 1, 0
    low, high = N % self.P, self.P
    while low > 1:
        ratio = high // low
        # Simultaneous update: RHS is evaluated before any rebinding.
        lm, low, hm, high = hm - lm * ratio, high - low * ratio, lm, low
    return lm % self.P
Returns the modular inverse of an integer with respect to the field characteristic P .
54,689
def is_on_curve(self, point):
    """Return True iff *point* satisfies y^2 = x^3 + a*x + b (mod P)."""
    x, y = point.X, point.Y
    lhs = pow(y, 2, self.P)
    rhs = pow(x, 3, self.P) + self.a * x + self.b
    return (lhs - rhs) % self.P == 0
Checks whether a point is on the curve .
54,690
def generate_private_key ( self ) : random_string = base64 . b64encode ( os . urandom ( 4096 ) ) . decode ( 'utf-8' ) binary_data = bytes ( random_string , 'utf-8' ) hash_object = hashlib . sha256 ( binary_data ) message_digest_bin = hash_object . digest ( ) message_digest_hex = binascii . hexlify ( message_digest_bin ...
Generates a private key based on the password .
54,691
def generate_public_key ( self ) : private_key = int ( self . private_key , 16 ) if private_key >= self . N : raise Exception ( 'Invalid private key.' ) G = JacobianPoint ( self . Gx , self . Gy , 1 ) public_key = G * private_key x_hex = '{0:0{1}x}' . format ( public_key . X , 64 ) y_hex = '{0:0{1}x}' . format ( public...
Generates a public key from the hex - encoded private key using elliptic curve cryptography . The private key is multiplied by a predetermined point on the elliptic curve called the generator point G, resulting in the corresponding public key . The generator point is always the same for all Bitcoin users .
54,692
def to_affine(self):
    """Convert this Jacobian point (X, Y, Z) to affine coordinates.

    Affine x = X / Z^2, affine y = Y / Z^3, both reduced mod P, with
    division performed via the modular inverse of Z.

    BUG FIX (review): the original read ``self.x/self.y/self.z`` while
    JacobianPoint instances are built with uppercase ``X=/Y=/Z=`` kwargs
    elsewhere in this file, and it reduced modulo a bare global ``P``
    while sibling methods consistently use ``self.P``.
    """
    X, Y, Z = self.X, self.Y, self.inverse(self.Z)
    return ((X * Z ** 2) % self.P, (Y * Z ** 3) % self.P)
Converts this point to an affine representation .
54,693
def slope(self, other):
    """Return the slope (Y1-Y2)/(X1-X2) mod P between this point and *other*.

    Division is carried out via the modular inverse of the x-difference.
    """
    dy = self.Y - other.Y
    dx = self.X - other.X
    return (dy * self.inverse(dx)) % self.P
Determines the slope between this point and another point .
54,694
def to_jacobian(self):
    """Convert this affine point to Jacobian coordinates (Z = 1).

    A falsy point (the point at infinity) maps to (0, 0, 0).
    """
    if self:
        return JacobianPoint(X=self.X, Y=self.Y, Z=1)
    return JacobianPoint(X=0, Y=0, Z=0)
Converts this point to a Jacobian representation .
54,695
def import_model(self, name, path="floyd.db.models"):
    """Import the model class *name* from *path*, memoized per instance.

    A previously loaded model is served straight from
    ``self._model_cache``; on ImportError the method returns False.
    """
    if name in self._model_cache:
        return self._model_cache[name]
    try:
        module = __import__(path, None, None, [name])
        model = getattr(module, name)
        self._model_cache[name] = model
    except ImportError:
        return False
    return model
Imports the model named *name* from *path*, returning it from the local model cache if it has been previously loaded; otherwise imports and caches it.
54,696
def parse_md ( self ) : post_content = _MARKDOWN . convert ( self . raw_src ) if hasattr ( _MARKDOWN , 'Meta' ) : for key in _MARKDOWN . Meta : print "\t meta: %s: %s (%s)" % ( key , _MARKDOWN . Meta [ key ] [ 0 ] , type ( _MARKDOWN . Meta [ key ] [ 0 ] ) ) if key == 'pubdate' : setattr ( self , key , datetime . dateti...
Takes a post path and returns a dictionary of variables
54,697
def execute_train_task_with_dependencies ( self , task_cls , ** kwargs ) : log . info ( "Task {0}" . format ( get_task_name ( task_cls ) ) ) task_inst = task_cls ( ) for arg in task_inst . args : if arg not in kwargs : kwargs [ arg ] = task_inst . args [ arg ] if hasattr ( task_inst , "dependencies" ) : deps = task_ins...
Run the training as well as any dependencies of the training task_cls - class of a task
54,698
def execute_predict_task(self, task_inst, predict_data, **kwargs):
    """Run *task_inst*'s prediction over *predict_data* and return it.

    NOTE(review): ``**kwargs`` is accepted but never forwarded — only
    ``task_inst.args`` is splatted into ``predict``; confirm intentional.
    """
    return task_inst.predict(predict_data, **task_inst.args)
Do a prediction task_inst - instance of a task
54,699
def train ( self , ** kwargs ) : log . info ( "Starting to train..." ) if not self . setup_run : self . setup ( ) self . trained_tasks = [ ] for task in self . tasks : data = self . reformatted_input [ task . data_format ] [ 'data' ] target = self . reformatted_input [ task . data_format ] [ 'target' ] if data is None ...
Do the workflow training