idx (int64, 0–63k) | question (string, 53–5.28k chars) | target (string, 5–805 chars) |
|---|---|---|
11,700 | def make_python_xref_nodes(py_typestr, state, hide_namespace=False): if hide_namespace: template = ':py:obj:`~{}`\n' else: template = ':py:obj:`{}`\n' xref_text = template.format(py_typestr) return parse_rst_content(xref_text, state) | Make docutils nodes containing a cross-reference to a Python object. |
11,701 | def make_python_xref_nodes_for_type(py_type, state, hide_namespace=False): if py_type.__module__ == 'builtins': typestr = py_type.__name__ else: typestr = '.'.join((py_type.__module__, py_type.__name__)) return make_python_xref_nodes(typestr, state, hide_namespace=hide_namespace) | Make docutils nodes containing a cross-reference to a Python object, given the object's type. |
11,702 | def make_section(section_id=None, contents=None): section = nodes.section() section['ids'].append(nodes.make_id(section_id)) section['names'].append(section_id) if contents is not None: section.extend(contents) return section | Make a docutils section node. |
11,703 | def split_role_content(role_rawsource): parts = {'last_component': False, 'display': None, 'ref': None} if role_rawsource.startswith('~'): parts['last_component'] = True role_rawsource = role_rawsource.lstrip('~') match = ROLE_DISPLAY_PATTERN.match(role_rawsource) if match: parts['displ... | Split the rawsource of a role into standard components. |
11,704 | def largest_graph(mol): mol.require("Valence") mol.require("Topology") m = clone(mol) if m.isolated: for k in itertools.chain.from_iterable(m.isolated): m.remove_atom(k) return m | Return the molecule with the largest graph in the compound. Passing a single molecule object gives the same result as molutil.clone. |
11,705 | def H_donor_count(mol): mol.require("Valence") return sum(1 for _, a in mol.atoms_iter() if a.H_donor) | Hydrogen bond donor count. |
11,706 | def H_acceptor_count(mol): mol.require("Valence") return sum(1 for _, a in mol.atoms_iter() if a.H_acceptor) | Hydrogen bond acceptor count. |
11,707 | def rotatable_count(mol): mol.require("Rotatable") return sum(1 for _, _, b in mol.bonds_iter() if b.rotatable) | Rotatable bond count. |
11,708 | def rule_of_five_violation(mol): v = 0 if mw(mol) > 500: v += 1 if H_donor_count(mol) > 5: v += 1 if H_acceptor_count(mol) > 10: v += 1 try: if wclogp.wclogp(mol) > 5: v += 1 except TypeError: v += 1 return v | Lipinski's rule-of-five violation count. |
11,709 | def formula(mol): mol.require("Valence") mol.require("Topology") total_cntr = Counter() for m in sorted(mols_iter(mol), key=len, reverse=True): cntr = Counter() for i in m: cntr += mol.atom(i).composition() text = [] Cs = cntr.pop("C", 0) if Cs: text.append("C") if ... | Chemical formula. Atoms are arranged in the order C, H, then other atoms; molecules are arranged by length of the formula text. |
11,710 | def debug(*args, **attrs): attrs.setdefault("is_flag", True) attrs.setdefault("default", None) return option(debug, *args, **attrs) | Show debugging information. |
11,711 | def dryrun(*args, **attrs): attrs.setdefault("is_flag", True) attrs.setdefault("default", None) return option(dryrun, *args, **attrs) | Perform a dry run. |
11,712 | def log(*args, **attrs): attrs.setdefault("metavar", "PATH") attrs.setdefault("show_default", False) return option(log, *args, **attrs) | Override the log file location. |
11,713 | def version(*args, **attrs): if hasattr(sys, "_getframe"): package = attrs.pop("package", sys._getframe(1).f_globals.get("__package__")) if package: attrs.setdefault("version", get_version(package)) return click.version_option(*args, **attrs) | Show the version and exit. |
11,714 | def to_rdmol(mol): rwmol = Chem.RWMol(Chem.MolFromSmiles('')) key_to_idx = {} bond_type = {1: Chem.BondType.SINGLE, 2: Chem.BondType.DOUBLE, 3: Chem.BondType.TRIPLE} conf = Chem.Conformer(rwmol.GetNumAtoms()) for k, a in mol.atoms_iter(): i = rwmol.AddAtom(Chem.Ato... | Convert a molecule to an RDMol. |
11,715 | def morgan_sim(mol1, mol2, radius=2, digit=3): rdmol1 = to_rdmol(mol1) rdmol2 = to_rdmol(mol2) fp1 = AllChem.GetMorganFingerprint(rdmol1, radius) fp2 = AllChem.GetMorganFingerprint(rdmol2, radius) return round(DataStructs.DiceSimilarity(fp1, fp2), digit) | Calculate Morgan fingerprint similarity using RDKit; radius=2 is roughly equivalent to ECFP4. |
11,716 | def build(self, X, Y, w=None, edges=None): super(MergeTree, self).build(X, Y, w, edges) if self.debug: sys.stdout.write("Merge Tree Computation: ") start = time.clock() self.__tree = MergeTreeFloat(vectorFloat(self.Xnorm.flatten()), vectorFloat(self.Y), str(se... | Assigns data to this object and builds the Merge Tree. |
11,717 | def build_for_contour_tree(self, contour_tree, negate=False): if self.debug: tree_type = "Join" if negate: tree_type = "Split" sys.stdout.write("{} Tree Computation: ".format(tree_type)) start = time.clock() Y = contour_tree.Y if negate: Y = -Y self.__tree = MergeTreeFloat(vectorFloa... | A helper function that reduces duplication of data by reusing the parent contour tree's parameters and data. |
11,718 | def verify_abort(func, *args, **kwargs): expected_exception = kwargs.pop("expected_exception", runez.system.AbortException) with CaptureOutput() as logged: try: value = func(*args, **kwargs) assert False, "%s did not raise, but returned %s" % (func, value) except expected_exception: ret... | Convenient wrapper around functions that should exit or raise an exception. |
11,719 | def pop(self, strip=False): r = self.contents() self.clear() if r and strip: r = r.strip() return r | Pop and return the current content; useful for testing. |
11,720 | def contents(self): c = self._header[:] c.append(' font-weight="{}"'.format(self.font_weight)) c.append(' font-family="{}"'.format(self.font_family)) c.append(' width="{}" height="{}"'.format(*self.screen_size)) sclw = self.original_size[0] * self.scale_factor sclh =... | Get the SVG string. |
11,721 | def data_url_scheme(self): encoded = base64.b64encode(self.contents().encode()) return "data:image/svg+xml;base64," + encoded.decode() | Get the SVG in data URL scheme format. |
11,722 | def _coords_conv(self, pos): px = (self.original_size[0] / 2 + pos[0]) * self.scale_factor py = (self.original_size[1] / 2 - pos[1]) * self.scale_factor return round(px, 2), round(py, 2) | For the SVG coordinate system: reflect over the X axis and translate from the center to the top-left. |
11,723 | def get_logger(self): if Global.LOGGER: Global.LOGGER.debug('configuring a logger') if self._logger_instance is not None: return self._logger_instance self._logger_instance = logging.getLogger("flowsLogger") self._logger_instance.setLevel(logging.DEBUG) log_format = '%(asctime)s - [%(... | Returns the standard logger. |
11,724 | def reconfigure_log_level(self): if Global.LOGGER: Global.LOGGER.debug('reconfiguring logger level') stream_handlers = filter(lambda x: type(x) is logging.StreamHandler, self._logger_instance.handlers) for x in stream_handlers: x.level = Global.CONFIG_MANAGER.log_level return self.g... | Returns a new standard logger instance. |
11,725 | def _build_toctree_node(parent=None, entries=None, includefiles=None, caption=None): subnode = sphinx.addnodes.toctree() subnode['parent'] = parent subnode['entries'] = entries subnode['includefiles'] = includefiles subnode['caption'] = caption subnode['maxdepth'] = 1 subnode['hidd... | Factory for a toctree node. |
11,726 | def _parse_skip_option(self): try: skip_text = self.options['skip'] except KeyError: return [] modules = [module.strip() for module in skip_text.split(',')] return modules | Parse the skip option of skipped module names. |
11,727 | def _parse_skip_option(self): try: skip_text = self.options['skip'] except KeyError: return [] packages = [package.strip() for package in skip_text.split(',')] return packages | Parse the skip option of skipped package names. |
11,728 | def _set_command_line_arguments(self, args): Global.LOGGER.debug("setting command line arguments") if args.VERBOSE: Global.LOGGER.debug("verbose mode active") Global.CONFIG_MANAGER.log_level = logging.DEBUG Global.LOGGER_INSTANCE.reconfigure_log_level() if args.STATS > 0: Global.L... | Set internal configuration variables according to the input parameters. |
11,729 | def start(self): Global.LOGGER.info("starting the flow manager") self._start_actions() self._start_message_fetcher() Global.LOGGER.debug("flow manager started") | Start all the processes. |
11,730 | def stop(self): Global.LOGGER.info("stopping the flow manager") self._stop_actions() self.isrunning = False Global.LOGGER.debug("flow manager stopped") | Stop all the processes. |
11,731 | def restart(self): Global.LOGGER.info("restarting the flow manager") self._stop_actions() self.actions = [] self._start_actions() Global.LOGGER.debug("flow manager restarted") | Restart all the processes. |
11,732 | def _start_actions(self): Global.LOGGER.info("starting actions") for recipe in Global.CONFIG_MANAGER.recipes: Global.CONFIG_MANAGER.read_recipe(recipe) list(map(lambda section: self._start_action_for_section(section), Global.CONFIG_MANAGER.sections)) | Start all the actions for the recipes. |
11,733 | def _start_action_for_section(self, section): if section == "configuration": return Global.LOGGER.debug("starting actions for section " + section) action_configuration = Global.CONFIG_MANAGER.sections[section] if len(action_configuration) == 0: Global.LOGGER.warn(f"section {section} has no... | Start all the actions for a particular section. |
11,734 | def _stop_actions(self): Global.LOGGER.info("stopping actions") list(map(lambda x: x.stop(), self.actions)) Global.LOGGER.info("actions stopped") | Stop all the actions. |
11,735 | def _perform_system_check(self): if Global.CONFIG_MANAGER.tracing_mode: Global.LOGGER.debug("performing a system check") now = datetime.datetime.now() sent = Global.MESSAGE_DISPATCHER.dispatched received = self.fetched queue_length = sent - received message_sleep_interval = Global.CONFIG_M... | Perform a system check to decide whether we need to throttle to handle all the incoming messages. |
11,736 | def _deliver_message(self, msg): my_subscribed_actions = self.subscriptions.get(msg.sender, []) for action in my_subscribed_actions: if Global.CONFIG_MANAGER.tracing_mode: Global.LOGGER.debug(f"delivering message to {action.name}") action.on_input_received(msg) | Deliver the message to the subscribed actions. |
11,737 | def _fetch_messages(self): try: [_, msg] = self.socket.recv_multipart(flags=zmq.NOBLOCK) if Global.CONFIG_MANAGER.tracing_mode: Global.LOGGER.debug("fetched a new message") self.fetched = self.fetched + 1 obj = pickle.loads(msg) self._deliver_message(obj) return obj except z... | Get an input message from the socket. |
11,738 | async def message_fetcher_coroutine(self, loop): Global.LOGGER.debug('registering callbacks for message fetcher coroutine') self.isrunning = True while self.isrunning: loop.call_soon(self._fetch_messages) loop.call_soon(self._perform_system_check) await asyncio.sleep(Global.CONFIG_M... | Register callbacks for the message fetcher coroutine. |
11,739 | def _adapt_sleep_interval(self, sent, received, queue, now): Global.LOGGER.debug("adjusting sleep interval") dispatched_since_last_check = sent - self.last_queue_check_count seconds_since_last_check = (now - self.last_queue_check_date).total_seconds() Global.LOGGER.debug(str(dispatched_... | Adapt the sleep time based on the number of messages in the queue. |
11,740 | def _parse_input_parameters(self): Global.LOGGER.debug("define and parsing command line arguments") parser = argparse.ArgumentParser(description='A workflow engine for Pythonistas', formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('FILENAME', nargs='+', help='name of t... | Set the configuration for the Logger. |
11,741 | def migrate_050_to_051(session): entries_to_update = session.query(Entry).filter(Entry.forgot_sign_out.is_(True)).filter(Entry.time_out.isnot(None)) for entry in entries_to_update: entry.time_out = None logging.info('Entry updated {}'.format(entry.uuid)) logging.debug(... | Set the time_out field of all flagged timesheet entries to Null. |
11,742 | def get_task_param_string(task): param_dict = task.to_str_params() items = [] for key in sorted(param_dict.keys()): items.append("'{:s}': '{:s}'".format(key, param_dict[key])) return "{" + ", ".join(items) + "}" | Get all parameters of a task as one string. |
11,743 | def check_completion(task, mark_incomplete=False, clear=False, return_stats=False): to_clear = dict() is_complete, stats = _check_completion(task, mark_incomplete=mark_incomplete, clear=clear, stats={}, visited=dict(), to_clear=to_clear) while to_clear: found_clearable_task = False... | Recursively check if a task and all its requirements are complete. |
11,744 | def build(cls, local_scheduler=True, **task_params): luigi.build([cls(**task_params)], local_scheduler=local_scheduler) | Instantiate the task and build it with luigi. |
11,745 | def clear(self): self.mark_incomplete() for object_class in self.object_classes: self.session.query(object_class).delete() self.close_session() | Delete all objects created by this task. |
11,746 | def complete(self): is_complete = super(ORMWrapperTask, self).complete() for req in self.requires(): is_complete &= req.complete() return is_complete | Task is complete if the completion marker is set and all requirements are complete. |
11,747 | def save(self, filename=None): if filename is None: filename = "morse_smale_complex.json" with open(filename, "w") as fp: fp.write(self.to_json()) | Saves a constructed Morse-Smale complex to a JSON file. |
11,748 | def get_label(self, indices=None): if indices is None: indices = list(range(0, self.get_sample_size())) elif isinstance(indices, collections.Iterable): indices = sorted(list(set(indices))) else: indices = [indices] if len(indices) == 0: return [] partitions = self.get_partit... | Returns the label pair indices requested by the user. |
11,749 | def get_sample_size(self, key=None): if key is None: return len(self.Y) else: return len(self.get_partitions(self.persistence)[key]) | Returns the number of samples in the input data. |
11,750 | def to_json(self): capsule = {} capsule["Hierarchy"] = [] for (dying, (persistence, surviving, saddle),) in self.merge_sequence.items(): capsule["Hierarchy"].append({"Dying": dying, "Persistence": persistence, "Surviving": surviving, "Saddle": saddle,}) capsule["Partitions"]... | Writes the complete Morse-Smale merge hierarchy to a string object. |
11,751 | def dict_to_numpy_array(d): return fromarrays(d.values(), np.dtype([(str(k), v.dtype) for k, v in d.items()])) | Convert a dict of 1D arrays to a NumPy recarray. |
11,752 | def concatenate_1d(arrays): if len(arrays) == 0: return np.array([]) if len(arrays) == 1: return np.asanyarray(arrays[0]) if any(map(np.ma.is_masked, arrays)): return np.ma.concatenate(arrays) return np.concatenate(arrays) | Concatenate 1D NumPy arrays. Similar to np.concatenate but works with empty input and masked arrays. |
11,753 | def formula_html(self, reversed_=False): if self.H_count == 1: text = "H" elif self.H_count > 1: text = "H<sub>{}</sub>".format(self.H_count) else: text = "" seq = [self.symbol, text, self.charge_sign_html()] if reversed_: seq = reversed(seq) return "".join(seq) | Chemical formula HTML. |
11,754 | def charge_sign(self): if self.charge > 0: sign = "+" elif self.charge < 0: sign = "–" else: return "" ab = abs(self.charge) if ab > 1: return str(ab) + sign return sign | Charge sign text. |
11,755 | def send_message(self, message): with self._instance_lock: if message is None: Global.LOGGER.error("can't deliver a null messages") return if message.sender is None: Global.LOGGER.error(f"can't deliver anonymous messages with body {message.body}") return if message.receiver is None: Global... | Dispatch a message using 0mq. |
11,756 | def update_properties_cache(sender, instance, action, reverse, model, pk_set, **kwargs): "Property cache actualization at POI save. It will not work yet after property removal." if action == 'post_add': instance.save_properties_cache() | Property cache actualization at POI save. It will not work yet after property removal. |
11,757 | def to_json(self): capsule = {} capsule["Hierarchy"] = [] for (dying, (persistence, surviving, saddle),) in self.merge_sequence.items(): capsule["Hierarchy"].append({"Persistence": persistence, "Dying": dying, "Surviving": surviving, "Saddle": saddle,}) capsule["Partitions"]... | Writes the complete Morse complex merge hierarchy to a string object. |
11,758 | def iter(context, sequence, limit=10): params = {'limit': limit, 'offset': 0} uri = '%s/%s/%s' % (context.dci_cs_api, RESOURCE, sequence) while True: j = context.session.get(uri, params=params).json() if len(j['jobs_events']): for i in j['jobs_events']: yield i else: break ... | Iterate over all the jobs events. |
11,759 | def delete(context, sequence): uri = '%s/%s/%s' % (context.dci_cs_api, RESOURCE, sequence) return context.session.delete(uri) | Delete jobs events from a given sequence. |
11,760 | def get_ldap(cls, global_options=None): if cls.ldap is None: import ldap.filter try: import ldap.dn except ImportError: from django_auth_ldap import dn ldap.dn = dn cls.ldap = ldap if (not cls._ldap_configured) and (global_options is not None): for opt, value in global_options.items(): ... | Returns the ldap module. The unit test harness will assign a mock object to _LDAPConfig.ldap. It is imperative that the ldap module not be imported anywhere else, so that the unit tests will pass in the absence of python-ldap. |
11,761 | def _begin(self, connection, filterargs=(), escape=True): if escape: filterargs = self._escape_filterargs(filterargs) try: filterstr = self.filterstr % filterargs msgid = connection.search(force_str(self.base_dn), self.scope, force_str(filterstr)) except ldap.LDAPError as e: msg... | Begins an asynchronous search and returns the message id to retrieve the results. |
11,762 | def _results(self, connection, msgid): try: kind, results = connection.result(msgid) if kind != ldap.RES_SEARCH_RESULT: results = [] except ldap.LDAPError as e: results = [] logger.error(u"result(%d) raised %s" % (msgid, pprint.pformat(e))) return self._process_results(results) | Returns the result of a previous asynchronous query. |
11,763 | def _escape_filterargs(self, filterargs): if isinstance(filterargs, tuple): filterargs = tuple(self.ldap.filter.escape_filter_chars(value) for value in filterargs) elif isinstance(filterargs, dict): filterargs = dict((key, self.ldap.filter.escape_filter_chars(value)) for key, v... | Escapes values in filterargs. |
11,764 | def _process_results(self, results): results = [r for r in results if r[0] is not None] results = _DeepStringCoder('utf-8').decode(results) results = [(r[0].lower(), r[1]) for r in results] result_dns = [result[0] for result in results] logger.debug(u"search_s('%s', %d, '%s') r... | Returns a sanitized copy of raw LDAP results. This scrubs out references, decodes UTF-8, normalizes DNs, etc. |
11,765 | def get_connection_string(params, hide_password=True): connection_string = params['driver'] + '://' user = params.get('user', None) password = params.get('password', None) host = params.get('host', None) port = params.get('port', None) database = params.get('database', None) if d... | Get a database connection string. |
11,766 | def pubticker(self, symbol='btcusd'): url = self.base_url + '/v1/pubticker/' + symbol return requests.get(url) | Send a request for the latest ticker info; return the response. |
11,767 | def book(self, symbol='btcusd', limit_bids=0, limit_asks=0): url = self.base_url + '/v1/book/' + symbol params = {'limit_bids': limit_bids, 'limit_asks': limit_asks} return requests.get(url, params) | Send a request to get the public order book; return the response. |
11,768 | def trades(self, symbol='btcusd', since=0, limit_trades=50, include_breaks=0): url = self.base_url + '/v1/trades/' + symbol params = {'since': since, 'limit_trades': limit_trades, 'include_breaks': include_breaks} return requests.get(url, params) | Send a request to get all public trades; return the response. |
11,769 | def auction(self, symbol='btcusd'): url = self.base_url + '/v1/auction/' + symbol return requests.get(url) | Send a request for the latest auction info; return the response. |
11,770 | def auction_history(self, symbol='btcusd', since=0, limit_auction_results=50, include_indicative=1): url = self.base_url + '/v1/auction/' + symbol + '/history' params = {'since': since, 'limit_auction_results': limit_auction_results, 'include_indicative': include_indicative} return requests.g... | Send a request for auction history info; return the response. |
11,771 | def new_order(self, amount, price, side, client_order_id=None, symbol='btcusd', type='exchange limit', options=None): request = '/v1/order/new' url = self.base_url + request params = {'request': request, 'nonce': self.get_nonce(), 'symbol': symbol, 'amount': amount, 'price': price, '... | Send a request to place an order; return the response. |
11,772 | def cancel_order(self, order_id): request = '/v1/order/cancel' url = self.base_url + request params = {'request': request, 'nonce': self.get_nonce(), 'order_id': order_id} return requests.post(url, headers=self.prepare(params)) | Send a request to cancel an order; return the response. |
11,773 | def past_trades(self, symbol='btcusd', limit_trades=50, timestamp=0): request = '/v1/mytrades' url = self.base_url + request params = {'request': request, 'nonce': self.get_nonce(), 'symbol': symbol, 'limit_trades': limit_trades, 'timestamp': timestamp} return requests.post(url, heade... | Send a trade history request; return the response. |
11,774 | def tradevolume(self): request = '/v1/tradevolume' url = self.base_url + request params = {'request': request, 'nonce': self.get_nonce()} return requests.post(url, headers=self.prepare(params)) | Send a request to get your trade volume; return the response. |
11,775 | def newAddress(self, currency='btc', label=''): request = '/v1/deposit/' + currency + '/newAddress' url = self.base_url + request params = {'request': request, 'nonce': self.get_nonce()} if label != '': params['label'] = label return requests.post(url, headers=self.prepare(params)) | Send a request for a new cryptocurrency deposit address, with an optional label; return the response. |
11,776 | def prepare(self, params): jsonparams = json.dumps(params) payload = base64.b64encode(jsonparams.encode()) signature = hmac.new(self.secret_key.encode(), payload, hashlib.sha384).hexdigest() return {'X-GEMINI-APIKEY': self.api_key, 'X-GEMINI-PAYLOAD': payload, 'X-GEMINI-SIGN... | Prepare and return the required HTTP headers. |
11,777 | def merge(cls, source_blocks): if len(source_blocks) == 1: return source_blocks[0] source_blocks.sort(key=operator.attrgetter('start_line_number')) main_block = source_blocks[0] boot_lines = main_block.boot_lines source_lines = [source_line for source_block in source_blocks for source_line... | Merge multiple SourceBlocks together. |
11,778 | def character_summary_table(): cl = client.get_client() session = cl.create_session() query = session.query(models.Character, models.Universe.name.label('universe'), models.Place.name.label('place_of_birth')).join(models.Character.universe).outerjoin(models.Character... | Export a table listing all characters and their data. |
11,779 | def fig_to_svg(fig): buf = io.StringIO() fig.savefig(buf, format='svg') buf.seek(0) return buf.getvalue() | Helper function to convert a matplotlib figure to an SVG string. |
11,780 | def movie_network(): template = jenv.get_template("movie_network.html") context = dict() cl = client.get_client() session = cl.create_session() query = session.query(models.Movie.id, models.Movie.name, models.Movie.url, models.Movie.budget_inflation_adjusted, models.Movie.i... | Generate an interactive network graph of movie appearances. |
11,781 | def unpack2D(_x): _x = np.atleast_2d(_x) x = _x[:, 0] y = _x[:, 1] return x, y | Helper function for splitting 2D data into x and y components to make equations simpler. |
11,782 | def is_at_exit(): if _threading_main_thread is not None: if not hasattr(threading, "main_thread"): return True if threading.main_thread() != _threading_main_thread: return True if not _threading_main_thread.is_alive(): return True return False | Some heuristics to figure out whether this is called at a stage where the Python interpreter is shutting down. |
11,783 | def better_exchook(etype, value, tb, debugshell=False, autodebugshell=True, file=None, with_color=None): if file is None: file = sys.stderr def output(ln): file.write(ln + "\n") color = Color(enable=with_color) output(color("EXCEPTION", color.fg_colors[1], bold=True)) a... | Replacement for sys.excepthook. |
11,784 | def dump_all_thread_tracebacks(exclude_thread_ids=None, file=None): if exclude_thread_ids is None: exclude_thread_ids = [] if not file: file = sys.stdout import threading if hasattr(sys, "_current_frames"): print("", file=file) threads = {t.ident: t for t in threading.enumerate()} for ... | Prints the traceback of all threads. |
11,785 | def _main(): if sys.argv[1:] == ["test"]: for k, v in sorted(globals().items()): if not k.startswith("test_"): continue print("running: %s()" % k) v() print("ok.") sys.exit() elif sys.argv[1:] == ["debug_shell"]: debug_shell(locals(), globals()) sys.exit() e... | Some demo. |
11,786 | def verify_mid_signature(certificate_data, sp_challenge, response_challenge, signature): if not response_challenge.startswith(sp_challenge): return False try: key = RSA.importKey(certificate_data) verifier = PKCS1_v1_5.new(key) except ValueError: key = ECC.import_key(certificate_data) veri... | Verify that the Mobile-ID authentication signature is valid. |
11,787 | def drive(self, event, *args): maps = self.base.get(event, self.step) for handle, data in maps[:]: params = args + data try: handle(self, *params) except Stop: break except StopIteration: pass except Kill as Root: raise except Erase: maps.remove((handle, data)) except Exception a... | Used to dispatch events. |
11,788 | def send(self, data): self.stdin.write(data) self.stdin.flush() | Send data to the child process through stdin. |
11,789 | def _simplify_arguments(arguments): if len(arguments.args) == 0: return arguments.kwargs elif len(arguments.kwargs) == 0: return arguments.args else: return arguments | If positional or keyword arguments are empty, return only one or the other. |
11,790 | def load(self): hdf_filename = os.path.join(self._dump_dirname, 'result.h5') if os.path.isfile(hdf_filename): store = pd.HDFStore(hdf_filename, mode='r') keys = store.keys() if keys == ['/df']: self.result = store['df'] else: if set(keys) == set(map(lambda i: '/%s' % i... | Load this step's result from its dump directory. |
11,791 | def setup_dump(self): dumpdir = self._dump_dirname if not os.path.isdir(dumpdir): os.makedirs(dumpdir) dump = False yaml_filename = self._yaml_filename if not os.path.isfile(yaml_filename): dump = True else: with open(yaml_filename) as f: if f.read() != yaml.dump(self): loggi... | Set up the dump, creating directories and writing a step.yaml file containing the YAML dump of this step. |
11,792 | def main(ctx, root_dir, verbose): root_dir = discover_package_doc_dir(root_dir) ctx.obj = {'root_dir': root_dir, 'verbose': verbose} if verbose: log_level = logging.DEBUG else: log_level = logging.INFO logger = logging.getLogger('documenteer') logger.addHandler(logging.StreamHandler()... | package-docs is a CLI for building single-package previews of documentation in the LSST Stack. |
11,793 | def apply_and_name(self, aggregator): reduced_df = self._apply(aggregator) if len(self.names) != len(reduced_df.columns): raise IndexError("ColumnFunction creates more columns than it has names for.") reduced_df.columns = self.names return reduced_df | Fetches the row-aggregated input columns for this ColumnFunction. |
11,794 | def aggregate(self, index): if isinstance(index, string_types): col_df_grouped = self.col_df.groupby(self.df[index]) else: self.col_df.index = pd.MultiIndex.from_arrays([self.df[i] for i in index]) col_df_grouped = self.col_df.groupby(level=index) self.col_df.index ... | Performs a groupby of the unique Columns by index as constructed from self.df. |
11,795 | def _apply(self, aggregator): reduced_dfs = [] if self.include_fraction: n_df = self.numerator.apply_and_name(aggregator) d_df = self.denominator.apply_and_name(aggregator) reduced_dfs.extend([n_df[cn] / d_df[cd] for cn, cd in product(n_df.columns, d_df.columns)]) if self.... | Returns a dataframe with the requested ColumnReductions. |
11,796 | def clone(self, **kwargs): init_kwargs = {"name": self.__name, "dataframe": self.__df, "include_columns": self.__include_columns, "include_index": self.__include_index, "style": self.__style, "column_styles": self.__col_styles, "column_widths": self.__column_widths, "row_styles": sel... | Create a clone of the Table, optionally with some properties changed. |
11,797 | def inspect(lines): labels = set() count = 0 exp = re.compile(r">.*?<([\w ]+)>") valid = False for line in lines: if line.startswith("M END\n"): valid = True elif line.startswith("$$$$"): count += 1 valid = False else: result = exp.match(line) if result: labels.add(result.group(1... | Inspect an SDFile given as a list of strings. |
11,798 | def inspect_file(path): with open(path, 'rb') as f: labels, count = inspect(tx.decode(line) for line in f) return labels, count | Inspect SDFile structure from a file path. |
11,799 | def optional_data(lines): data = {} exp = re.compile(r">.*?<([\w ]+)>") for i, line in enumerate(lines): result = exp.match(line) if result: data[result.group(1)] = lines[i + 1] return data | Parse the SDFile data part into a dict. |
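
Each row above pairs a flattened Python function (the `question` column) with a one-line summary of its docstring (the `target` column), the usual layout for a code-summarization corpus. As a minimal sketch of how such a split might be consumed, assuming the rows are exported as a JSON Lines file named `rows.jsonl` with `idx`, `question`, and `target` fields (the filename and export format are assumptions, not shown in this preview):

```python
import json

def iter_examples(path):
    """Yield (idx, code, summary) triples from a JSONL export of this split."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            row = json.loads(line)
            yield row["idx"], row["question"], row["target"]

# Usage: print the summary and code of the first example.
# for idx, code, summary in iter_examples("rows.jsonl"):
#     print(idx, summary)
#     print(code)
#     break
```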