idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
11,700
def make_python_xref_nodes(py_typestr, state, hide_namespace=False):
    """Make docutils nodes containing a cross-reference to a Python object.

    When ``hide_namespace`` is true the ``~`` prefix is used so only the last
    component of the dotted name is displayed.
    """
    prefix = '~' if hide_namespace else ''
    xref_text = ':py:obj:`{}{}`\n'.format(prefix, py_typestr)
    return parse_rst_content(xref_text, state)
Make docutils nodes containing a cross - reference to a Python object .
11,701
def make_python_xref_nodes_for_type(py_type, state, hide_namespace=False):
    """Make docutils nodes containing a cross-reference to a Python object,
    given the object's type.

    Builtins are referenced by bare name; everything else is fully qualified.
    """
    if py_type.__module__ == 'builtins':
        typestr = py_type.__name__
    else:
        typestr = '{}.{}'.format(py_type.__module__, py_type.__name__)
    return make_python_xref_nodes(typestr, state, hide_namespace=hide_namespace)
Make docutils nodes containing a cross - reference to a Python object given the object s type .
11,702
def make_section(section_id=None, contents=None):
    """Make a docutils section node with the given id and optional children.

    NOTE(review): ``nodes.make_id`` expects a string; passing the default
    ``section_id=None`` likely fails — confirm callers always supply an id.
    """
    section = nodes.section()
    section['ids'].append(nodes.make_id(section_id))
    section['names'].append(section_id)
    if contents is not None:
        section.extend(contents)
    return section
Make a docutils section node .
11,703
def split_role_content(role_rawsource):
    """Split the rawsource of a role into its standard components.

    Returns a dict with keys ``last_component`` (leading ``~`` present),
    ``display`` (explicit display text, if any) and ``ref`` (the reference).
    """
    parts = {'last_component': False, 'display': None, 'ref': None}
    text = role_rawsource
    if text.startswith('~'):
        parts['last_component'] = True
        text = text.lstrip('~')
    match = ROLE_DISPLAY_PATTERN.match(text)
    if match is None:
        parts['ref'] = text.strip()
    else:
        parts['display'] = match.group('display').strip()
        parts['ref'] = match.group('reference').strip()
    return parts
Split the rawsource of a role into standard components .
11,704
def largest_graph(mol):
    """Return a clone of ``mol`` with its isolated (non-largest) graphs removed.

    Passing a single-molecule object gives the same result as molutil.clone.
    """
    mol.require("Valence")
    mol.require("Topology")
    copied = clone(mol)
    if copied.isolated:
        # Remove every atom belonging to an isolated component.
        for key in itertools.chain.from_iterable(copied.isolated):
            copied.remove_atom(key)
    return copied
Return the molecule with the largest graph in the compound. Passing a single-molecule object gives the same result as molutil.clone.
11,705
def H_donor_count(mol):
    """Hydrogen bond donor count."""
    mol.require("Valence")
    count = 0
    for _, atom in mol.atoms_iter():
        if atom.H_donor:
            count += 1
    return count
Hydrogen bond donor count
11,706
def H_acceptor_count(mol):
    """Hydrogen bond acceptor count."""
    mol.require("Valence")
    count = 0
    for _, atom in mol.atoms_iter():
        if atom.H_acceptor:
            count += 1
    return count
Hydrogen bond acceptor count
11,707
def rotatable_count(mol):
    """Rotatable bond count."""
    mol.require("Rotatable")
    total = 0
    for _, _, bond in mol.bonds_iter():
        if bond.rotatable:
            total += 1
    return total
Rotatable bond count
11,708
def rule_of_five_violation(mol):
    """Lipinski's rule-of-five violation count (0-4)."""
    violations = 0
    if mw(mol) > 500:
        violations += 1
    if H_donor_count(mol) > 5:
        violations += 1
    if H_acceptor_count(mol) > 10:
        violations += 1
    try:
        if wclogp.wclogp(mol) > 5:
            violations += 1
    except TypeError:
        # An uncomputable logP is counted as a violation.
        violations += 1
    return violations
Lipinski's rule of five violation count
11,709
def formula(mol):
    """Chemical formula.

    Atoms appear as C, H, then other atoms sorted by atomic number;
    component molecules are ordered by length of their formula text.
    """
    mol.require("Valence")
    mol.require("Topology")

    def _component_formula(atom_keys):
        # Sum element composition over one connected component.
        counts = Counter()
        for key in atom_keys:
            counts += mol.atom(key).composition()
        pieces = []
        for sym in ("C", "H"):
            n = counts.pop(sym, 0)
            if n:
                pieces.append(sym)
                if n > 1:
                    pieces.append(str(n))
        for sym, n in sorted(counts.items(), key=lambda kv: atom_number(kv[0])):
            pieces.append(sym)
            if n > 1:
                pieces.append(str(n))
        return "".join(pieces)

    totals = Counter()
    for component in sorted(mols_iter(mol), key=len, reverse=True):
        totals[_component_formula(component)] += 1
    parts = []
    for text, n in sorted(totals.items(), key=lambda kv: len(kv[0]), reverse=True):
        parts.append(str(n) + text if n > 1 else text)
    return ".".join(parts)
Chemical formula . Atoms should be arranged in order of C H and other atoms . Molecules should be arranged in order of length of formula text .
11,710
def debug(*args, **attrs):
    """Show debugging information."""
    for key, value in (("is_flag", True), ("default", None)):
        attrs.setdefault(key, value)
    return option(debug, *args, **attrs)
Show debugging information .
11,711
def dryrun(*args, **attrs):
    """Perform a dryrun."""
    for key, value in (("is_flag", True), ("default", None)):
        attrs.setdefault(key, value)
    return option(dryrun, *args, **attrs)
Perform a dryrun .
11,712
def log(*args, **attrs):
    """Override log file location."""
    for key, value in (("metavar", "PATH"), ("show_default", False)):
        attrs.setdefault(key, value)
    return option(log, *args, **attrs)
Override log file location .
11,713
def version(*args, **attrs):
    """Show the version and exit."""
    if hasattr(sys, "_getframe"):
        # Default the package to the caller's __package__ so the version can
        # be resolved automatically.
        package = attrs.pop("package", sys._getframe(1).f_globals.get("__package__"))
        if package:
            attrs.setdefault("version", get_version(package))
    return click.version_option(*args, **attrs)
Show the version and exit .
11,714
def to_rdmol(mol):
    """Convert a molecule to an RDKit Mol (atoms, bonds and a conformer)."""
    rwmol = Chem.RWMol(Chem.MolFromSmiles(''))
    key_to_idx = {}
    bond_type = {1: Chem.BondType.SINGLE,
                 2: Chem.BondType.DOUBLE,
                 3: Chem.BondType.TRIPLE}
    conf = Chem.Conformer(rwmol.GetNumAtoms())
    for key, atom in mol.atoms_iter():
        idx = rwmol.AddAtom(Chem.Atom(atom_number(atom.symbol)))
        key_to_idx[key] = idx
        conf.SetAtomPosition(idx, atom.coords)
    rwmol.AddConformer(conf)
    for u, v, bond in mol.bonds_iter():
        rwmol.AddBond(key_to_idx[u], key_to_idx[v], bond_type[bond.order])
    Chem.GetSSSR(rwmol)
    rwmol.UpdatePropertyCache(strict=False)
    return rwmol.GetMol()
Convert molecule to RDMol
11,715
def morgan_sim(mol1, mol2, radius=2, digit=3):
    """Morgan fingerprint (Dice) similarity via RDKit; radius=2 ~ ECFP4."""
    fp1 = AllChem.GetMorganFingerprint(to_rdmol(mol1), radius)
    fp2 = AllChem.GetMorganFingerprint(to_rdmol(mol2), radius)
    return round(DataStructs.DiceSimilarity(fp1, fp2), digit)
Calculate morgan fingerprint similarity by using RDKit radius = 2 roughly equivalent to ECFP4
11,716
def build(self, X, Y, w=None, edges=None):
    """Assign data to this object and build the Merge Tree.

    Delegates normalization to the superclass, then constructs the native
    MergeTreeFloat and runs the internal build step.
    """
    super(MergeTree, self).build(X, Y, w, edges)
    if self.debug:
        sys.stdout.write("Merge Tree Computation: ")
        # Bug fix: time.clock() was removed in Python 3.8;
        # time.perf_counter() is the documented replacement for timing.
        start = time.perf_counter()
    self.__tree = MergeTreeFloat(
        vectorFloat(self.Xnorm.flatten()),
        vectorFloat(self.Y),
        str(self.gradient),
        self.graph_rep.full_graph(),
        self.debug,
    )
    self._internal_build()
    if self.debug:
        end = time.perf_counter()
        sys.stdout.write("%f s\n" % (end - start))
Assigns data to this object and builds the Merge Tree
11,717
def build_for_contour_tree(self, contour_tree, negate=False):
    """Build this merge tree by reusing the parent contour tree's data.

    Reduces duplication: the contour tree's normalized X, gradient and graph
    are reused; Y is negated to compute a split tree instead of a join tree.
    """
    if self.debug:
        tree_type = "Join"
        if negate:
            tree_type = "Split"
        sys.stdout.write("{} Tree Computation: ".format(tree_type))
        # Bug fix: time.clock() was removed in Python 3.8;
        # time.perf_counter() is the documented replacement for timing.
        start = time.perf_counter()
    Y = contour_tree.Y
    if negate:
        Y = -Y
    self.__tree = MergeTreeFloat(
        vectorFloat(contour_tree.Xnorm.flatten()),
        vectorFloat(Y),
        str(contour_tree.gradient),
        contour_tree.graph_rep.full_graph(),
        self.debug,
    )
    self._internal_build()
    if self.debug:
        end = time.perf_counter()
        sys.stdout.write("%f s\n" % (end - start))
A helper function that reduces data duplication by reusing the parent contour tree's parameters and data
11,718
def verify_abort(func, *args, **kwargs):
    """Call ``func`` expecting it to raise; return the captured output.

    ``expected_exception`` (kwarg) defaults to runez's AbortException; the
    assertion fires if the call returns instead of raising.
    """
    expected_exception = kwargs.pop("expected_exception", runez.system.AbortException)
    with CaptureOutput() as logged:
        try:
            value = func(*args, **kwargs)
            assert False, "%s did not raise, but returned %s" % (func, value)
        except expected_exception:
            return str(logged)
Convenient wrapper around functions that should exit or raise an exception
11,719
def pop(self, strip=False):
    """Pop and return the current contents (optionally stripped); clears the buffer."""
    result = self.contents()
    self.clear()
    if strip and result:
        result = result.strip()
    return result
Current content, popped; useful for testing
11,720
def contents(self):
    """Build and return the complete SVG document string.

    The drawing is fitted into a square viewBox (long side of the scaled
    original size) with ``self.margin`` padding on every side.
    """
    c = self._header[:]
    c.append(' font-weight="{}"'.format(self.font_weight))
    c.append(' font-family="{}"'.format(self.font_family))
    c.append(' width="{}" height="{}"'.format(*self.screen_size))
    scaled_w = self.original_size[0] * self.scale_factor
    scaled_h = self.original_size[1] * self.scale_factor
    longside = max(scaled_w, scaled_h)
    width = round(longside + self.margin * 2, 2)
    height = round(longside + self.margin * 2, 2)
    xleft = round(-self.margin - (longside - scaled_w) / 2, 2)
    ytop = round(-self.margin - (longside - scaled_h) / 2, 2)
    c.append(' viewBox="{} {} {} {}">\n'.format(xleft, ytop, width, height))
    if self.bgcolor is not None:
        # Bug fix: the original emitted '<rect x="..",' with a stray comma
        # after the x attribute, which is not valid SVG markup.
        c.append('<rect x="{}" y="{}" width="{}" height="{}" fill="{}" />\n'.format(
            xleft, ytop, width, height, self.bgcolor))
    c.extend(self._elems)
    c.append("</svg>")
    return "".join(c)
Get svg string
11,721
def data_url_scheme(self):
    """Return the SVG as a base64-encoded Data URL."""
    payload = base64.b64encode(self.contents().encode()).decode()
    return "data:image/svg+xml;base64," + payload
Get svg in Data URL Scheme format .
11,722
def _coords_conv ( self , pos ) : px = ( self . original_size [ 0 ] / 2 + pos [ 0 ] ) * self . scale_factor py = ( self . original_size [ 1 ] / 2 - pos [ 1 ] ) * self . scale_factor return round ( px , 2 ) , round ( py , 2 )
For Svg coordinate system reflect over X axis and translate from center to top - left
11,723
def get_logger(self):
    """Return the standard "flowsLogger" logger, creating it on first use.

    The logger itself runs at DEBUG; the attached stream handler filters
    console output to INFO.
    """
    if Global.LOGGER:
        Global.LOGGER.debug('configuring a logger')
    if self._logger_instance is not None:
        return self._logger_instance
    logger = logging.getLogger("flowsLogger")
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - [%(levelname)s]|%(thread)d\t%(message)s',
        '%Y-%m-%d %H:%M:%S')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(logging.INFO)
    logger.addHandler(handler)
    self._logger_instance = logger
    return self._logger_instance
Returns the standard logger
11,724
def reconfigure_log_level(self):
    """Apply the configured log level to stream handlers; return the logger."""
    if Global.LOGGER:
        Global.LOGGER.debug('reconfiguring logger level')
    for handler in self._logger_instance.handlers:
        # Match StreamHandler exactly, not subclasses such as FileHandler.
        if type(handler) is logging.StreamHandler:
            handler.level = Global.CONFIG_MANAGER.log_level
    return self.get_logger()
Returns a new standard logger instance
11,725
def _build_toctree_node(parent=None, entries=None, includefiles=None, caption=None):
    """Factory for a sphinx toctree node with fixed display defaults."""
    subnode = sphinx.addnodes.toctree()
    subnode['parent'] = parent
    subnode['entries'] = entries
    subnode['includefiles'] = includefiles
    subnode['caption'] = caption
    subnode['maxdepth'] = 1
    subnode['hidden'] = False  # cleanup: was assigned twice in the original
    subnode['glob'] = None
    subnode['includehidden'] = False
    subnode['numbered'] = 0
    subnode['titlesonly'] = False
    return subnode
Factory for a toctree node .
11,726
def _parse_skip_option ( self ) : try : skip_text = self . options [ 'skip' ] except KeyError : return [ ] modules = [ module . strip ( ) for module in skip_text . split ( ',' ) ] return modules
Parse the skip option of skipped module names .
11,727
def _parse_skip_option ( self ) : try : skip_text = self . options [ 'skip' ] except KeyError : return [ ] packages = [ package . strip ( ) for package in skip_text . split ( ',' ) ] return packages
Parse the skip option of skipped package names .
11,728
def _set_command_line_arguments(self, args):
    """Apply parsed command-line arguments to the global configuration."""
    Global.LOGGER.debug("setting command line arguments")
    if args.VERBOSE:
        Global.LOGGER.debug("verbose mode active")
        Global.CONFIG_MANAGER.log_level = logging.DEBUG
        Global.LOGGER_INSTANCE.reconfigure_log_level()
    if args.STATS > 0:
        Global.LOGGER.debug(f"stats requested every {args.STATS} seconds")
        Global.CONFIG_MANAGER.show_stats = True
        Global.CONFIG_MANAGER.stats_timeout = args.STATS
    if args.INTERVAL > 0:
        Global.LOGGER.debug(f"setting sleep interval to {args.INTERVAL} milliseconds")
        Global.CONFIG_MANAGER.sleep_interval = float(args.INTERVAL) / 1000
    if args.TRACE:
        Global.LOGGER.debug("tracing mode active")
        Global.CONFIG_MANAGER.tracing_mode = True
        Global.CONFIG_MANAGER.log_level = logging.DEBUG
        Global.LOGGER_INSTANCE.reconfigure_log_level()
    if args.MESSAGEINTERVAL is not None and args.MESSAGEINTERVAL > 0:
        Global.LOGGER.debug(
            f"setting message fetcher sleep interval to {args.MESSAGEINTERVAL/10} milliseconds")
        # MESSAGEINTERVAL is in tenths of milliseconds.
        Global.CONFIG_MANAGER.message_fetcher_sleep_interval = float(args.MESSAGEINTERVAL) / 10000
        Global.CONFIG_MANAGER.fixed_message_fetcher_interval = True
    Global.LOGGER.debug(f"recipes to be parsed: {args.FILENAME}")
    Global.CONFIG_MANAGER.recipes = args.FILENAME
Set internal configuration variables according to the input parameters
11,729
def start(self):
    """Start all the processes (actions first, then the message fetcher)."""
    Global.LOGGER.info("starting the flow manager")
    self._start_actions()
    self._start_message_fetcher()
    Global.LOGGER.debug("flow manager started")
Start all the processes
11,730
def stop(self):
    """Stop all the processes and mark the manager as not running."""
    Global.LOGGER.info("stopping the flow manager")
    self._stop_actions()
    self.isrunning = False
    Global.LOGGER.debug("flow manager stopped")
Stop all the processes
11,731
def restart(self):
    """Restart all the processes: stop actions, reset the list, start again."""
    Global.LOGGER.info("restarting the flow manager")
    self._stop_actions()
    self.actions = []
    self._start_actions()
    Global.LOGGER.debug("flow manager restarted")
Restart all the processes
11,732
def _start_actions(self):
    """Read every configured recipe and start the actions of each section."""
    Global.LOGGER.info("starting actions")
    for recipe in Global.CONFIG_MANAGER.recipes:
        Global.CONFIG_MANAGER.read_recipe(recipe)
    for section in Global.CONFIG_MANAGER.sections:
        self._start_action_for_section(section)
Start all the actions for the recipes
11,733
def _start_action_for_section(self, section):
    """Create and register the action configured by one recipe section.

    The special "configuration" section is skipped; sections without a
    configuration or without a resolvable type are skipped with a warning.
    """
    if section == "configuration":
        return
    Global.LOGGER.debug("starting actions for section " + section)
    action_configuration = Global.CONFIG_MANAGER.sections[section]
    if len(action_configuration) == 0:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        Global.LOGGER.warning(f"section {section} has no configuration, skipping")
        return
    action_type = action_configuration.get("type")
    new_managed_input = []
    if "input" in action_configuration:
        action_input = action_configuration["input"]
        new_managed_input = [item.strip() for item in action_input.split(",")]
    my_action = Action.create_action_for_code(action_type,
                                              section,
                                              action_configuration,
                                              new_managed_input)
    if not my_action:
        Global.LOGGER.warning(
            f"can't find a type for action {section}, the action will be skipped")
        return
    self.actions.append(my_action)
    Global.LOGGER.debug("updating the subscriptions table")
    for my_input in my_action.monitored_input:
        self.subscriptions.setdefault(my_input, []).append(my_action)
Start all the actions for a particular section
11,734
def _stop_actions(self):
    """Stop every running action."""
    Global.LOGGER.info("stopping actions")
    for action in self.actions:
        action.stop()
    Global.LOGGER.info("actions stopped")
Stop all the actions
11,735
def _perform_system_check(self):
    """Decide whether to throttle based on queue pressure; optionally log stats."""
    if Global.CONFIG_MANAGER.tracing_mode:
        Global.LOGGER.debug("performing a system check")
    now = datetime.datetime.now()
    sent = Global.MESSAGE_DISPATCHER.dispatched
    received = self.fetched
    queue_length = sent - received
    message_sleep_interval = Global.CONFIG_MANAGER.message_fetcher_sleep_interval
    if Global.CONFIG_MANAGER.show_stats:
        if (now - self.last_stats_check_date).total_seconds() > Global.CONFIG_MANAGER.stats_timeout:
            self.last_stats_check_date = now
            stats_string = f"showing stats\n--- [STATS] ---\nMessage Sent: {sent}\nMessage Received: {received}\nMessage Sleep Interval = {message_sleep_interval}\nQueue length = {queue_length}\n--- [ END ] ---"
            Global.LOGGER.info(stats_string)
    messages_limit_reached = (
        sent - self.last_queue_check_count
        > Global.CONFIG_MANAGER.messages_dispatched_for_system_check)
    queue_limit_reached = (
        queue_length > Global.CONFIG_MANAGER.queue_length_for_system_check)
    time_limit_since_last_check_is_over = (
        (now - self.last_queue_check_date).total_seconds()
        > Global.CONFIG_MANAGER.seconds_between_queue_check)
    if not Global.CONFIG_MANAGER.fixed_message_fetcher_interval:
        if messages_limit_reached or (queue_limit_reached and time_limit_since_last_check_is_over):
            cause = "messages limit reached" if messages_limit_reached else "queue limit reached"
            Global.LOGGER.debug(f"triggering the throttle function due to {cause}")
            self._adapt_sleep_interval(sent, received, queue_length, now)
Perform a system check to define if we need to throttle to handle all the incoming messages
11,736
def _deliver_message(self, msg):
    """Deliver the message to every action subscribed to its sender."""
    for action in self.subscriptions.get(msg.sender, []):
        if Global.CONFIG_MANAGER.tracing_mode:
            Global.LOGGER.debug(f"delivering message to {action.name}")
        action.on_input_received(msg)
Deliver the message to the subscripted actions
11,737
def _fetch_messages(self):
    """Fetch one input message from the socket; return None if none pending."""
    try:
        [_, msg] = self.socket.recv_multipart(flags=zmq.NOBLOCK)
        if Global.CONFIG_MANAGER.tracing_mode:
            Global.LOGGER.debug("fetched a new message")
        self.fetched += 1
        # NOTE(review): pickle.loads on socket payloads is unsafe with
        # untrusted peers — confirm this socket is in-process only.
        obj = pickle.loads(msg)
        self._deliver_message(obj)
        return obj
    except zmq.error.Again:
        # Non-blocking receive found nothing.
        return None
    except Exception as new_exception:
        Global.LOGGER.error(new_exception)
        raise new_exception
Get an input message from the socket
11,738
async def message_fetcher_coroutine(self, loop):
    """Periodically schedule message fetching and system checks on ``loop``."""
    Global.LOGGER.debug('registering callbacks for message fetcher coroutine')
    self.isrunning = True
    while self.isrunning:
        loop.call_soon(self._fetch_messages)
        loop.call_soon(self._perform_system_check)
        await asyncio.sleep(Global.CONFIG_MANAGER.message_fetcher_sleep_interval)
    Global.LOGGER.debug('message fetcher stopped')
Register callback for message fetcher coroutines
11,739
def _adapt_sleep_interval(self, sent, received, queue, now):
    """Adapt the fetcher sleep interval to the current message pressure."""
    Global.LOGGER.debug("adjusting sleep interval")
    dispatched_since_last_check = sent - self.last_queue_check_count
    seconds_since_last_check = (now - self.last_queue_check_date).total_seconds()
    Global.LOGGER.debug(str(dispatched_since_last_check)
                        + " dispatched in the last "
                        + str(seconds_since_last_check))
    sleep_time = (seconds_since_last_check
                  / (dispatched_since_last_check + queue + 1)) * 0.75
    # Clamp to the [0.0001, 0.5] second range.
    sleep_time = min(max(sleep_time, 0.0001), 0.5)
    self.last_queue_check_date = now
    self.last_queue_check_count = sent
    Global.CONFIG_MANAGER.message_fetcher_sleep_interval = sleep_time
    sleep_interval_log_string = f"new sleep_interval = {sleep_time}"
    Global.LOGGER.debug(sleep_interval_log_string)
    if Global.CONFIG_MANAGER.show_stats:
        Global.LOGGER.info(sleep_interval_log_string)
Adapt sleep time based on the number of the messages in queue
11,740
def _parse_input_parameters(self):
    """Define and parse the command-line arguments; return the namespace."""
    Global.LOGGER.debug("define and parsing command line arguments")
    parser = argparse.ArgumentParser(
        description='A workflow engine for Pythonistas',
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('FILENAME', nargs='+',
                        help='name of the recipe file(s)')
    parser.add_argument('-i', '--INTERVAL', type=int, default=500, metavar=('MS'),
                        help='perform a cycle each [MS] milliseconds. (default = 500)')
    parser.add_argument('-m', '--MESSAGEINTERVAL', type=int, metavar=('X'),
                        help='dequeue a message each [X] tenth of milliseconds. (default = auto)')
    parser.add_argument('-s', '--STATS', type=int, default=0, metavar=('SEC'),
                        help='show stats each [SEC] seconds. (default = NO STATS)')
    parser.add_argument('-t', '--TRACE', action='store_true',
                        help='enable super verbose output, only useful for tracing')
    parser.add_argument('-v', '--VERBOSE', action='store_true',
                        help='enable verbose output')
    parser.add_argument('-V', '--VERSION', action="version", version=__version__)
    return parser.parse_args()
Set the configuration for the Logger
11,741
def migrate_050_to_051(session):
    """Set time_out of all flagged (forgot_sign_out) timesheet entries to NULL."""
    entries_to_update = (session.query(Entry)
                         .filter(Entry.forgot_sign_out.is_(True))
                         .filter(Entry.time_out.isnot(None)))
    for entry in entries_to_update:
        entry.time_out = None
        logging.info('Entry updated {}'.format(entry.uuid))
        logging.debug(entry.uuid)
        session.add(entry)
Set time_out field of all flagged timesheet entries to Null .
11,742
def get_task_param_string(task):
    """Render all task parameters as one deterministic dict-literal string."""
    params = task.to_str_params()
    body = ", ".join("'{:s}': '{:s}'".format(key, params[key])
                     for key in sorted(params))
    return "{" + body + "}"
Get all parameters of a task as one string
11,743
def check_completion(task, mark_incomplete=False, clear=False, return_stats=False):
    """Recursively check whether a task and all its requirements are complete.

    Optionally clears tasks bottom-up: in each pass only tasks no longer
    required by any other pending task may be cleared.
    """
    to_clear = dict()
    is_complete, stats = _check_completion(task,
                                           mark_incomplete=mark_incomplete,
                                           clear=clear,
                                           stats={},
                                           visited=dict(),
                                           to_clear=to_clear)
    while to_clear:
        found_clearable_task = False
        for task_id in list(to_clear.keys()):
            entry = to_clear[task_id]
            if entry['required_by']:
                continue
            found_clearable_task = True
            task = entry['task']
            if isinstance(task, ORMTask):
                task.mark_incomplete()
                task.clear()
                _increment_stats(stats, 'Cleared')
                config.logger.info("Cleared task: " + task_id)
            else:
                config.logger.info('Cannot clear task, not an ORMTask: ' + task_id)
            del to_clear[task_id]
            # This task no longer blocks anything else.
            for waiting in to_clear.values():
                waiting['required_by'].discard(task_id)
        if not found_clearable_task:
            raise RuntimeError("Error in recursive task clearing, no clearable task found")
    config.logger.info("Task completion checking, summary:\n" + str(stats))
    if return_stats:
        return is_complete, stats
    return is_complete
Recursively check if a task and all its requirements are complete
11,744
def build(cls, local_scheduler=True, **task_params):
    """Instantiate the task with ``task_params`` and build it with luigi."""
    luigi.build([cls(**task_params)], local_scheduler=local_scheduler)
Instantiate the task and build it with luigi
11,745
def clear(self):
    """Mark this task incomplete and delete all objects it created."""
    self.mark_incomplete()
    for object_class in self.object_classes:
        self.session.query(object_class).delete()
    self.close_session()
Delete all objects created by this task .
11,746
def complete(self):
    """Complete iff the completion marker is set and every requirement is complete."""
    is_complete = super(ORMWrapperTask, self).complete()
    for req in self.requires():
        is_complete &= req.complete()
    return is_complete
Task is complete if completion marker is set and all requirements are complete
11,747
def save(self, filename=None):
    """Save the constructed Morse-Smale Complex to a JSON file."""
    if filename is None:
        filename = "morse_smale_complex.json"
    with open(filename, "w") as fp:
        fp.write(self.to_json())
Saves a constructed Morse - Smale Complex in json file
11,748
def get_label(self, indices=None):
    """Return the partition label for each requested sample index.

    ``indices`` may be None (all samples), a single index, or an iterable of
    indices (de-duplicated and sorted). Returns a scalar label for a single
    index, otherwise an array of labels.
    """
    if indices is None:
        indices = list(range(0, self.get_sample_size()))
    # Fix: collections.Iterable was removed in Python 3.10; the ABC lives in
    # collections.abc.
    elif isinstance(indices, collections.abc.Iterable):
        indices = sorted(set(indices))
    else:
        indices = [indices]
    if len(indices) == 0:
        return []
    partitions = self.get_partitions(self.persistence)
    labels = self.X.shape[0] * [None]
    for label, partition_indices in partitions.items():
        for idx in np.intersect1d(partition_indices, indices):
            labels[idx] = label
    labels = np.array(labels)
    if len(indices) == 1:
        return labels[indices][0]
    return labels[indices]
Returns the label pair indices requested by the user
11,749
def get_sample_size(self, key=None):
    """Number of samples overall, or within the partition ``key``."""
    if key is None:
        return len(self.Y)
    return len(self.get_partitions(self.persistence)[key])
Returns the number of samples in the input data
11,750
def to_json(self):
    """Serialize the Morse-Smale merge hierarchy and partitions to JSON."""
    capsule = {"Hierarchy": []}
    for dying, (persistence, surviving, saddle) in self.merge_sequence.items():
        capsule["Hierarchy"].append({
            "Dying": dying,
            "Persistence": persistence,
            "Surviving": surviving,
            "Saddle": saddle,
        })
    # One (min, max) extremum-pair label per sample.
    base = np.array([None, None] * len(self.Y)).reshape(-1, 2)
    for (min_index, max_index), items in self.base_partitions.items():
        base[items, :] = [min_index, max_index]
    capsule["Partitions"] = base.tolist()
    return json.dumps(capsule)
Writes the complete Morse - Smale merge hierarchy to a string object .
11,751
def dict_to_numpy_array ( d ) : return fromarrays ( d . values ( ) , np . dtype ( [ ( str ( k ) , v . dtype ) for k , v in d . items ( ) ] ) )
Convert a dict of 1d array to a numpy recarray
11,752
def concatenate_1d(arrays):
    """Concatenate 1-D numpy arrays.

    Like np.concatenate but handles empty input and masked arrays.
    """
    if len(arrays) == 0:
        return np.array([])
    if len(arrays) == 1:
        return np.asanyarray(arrays[0])
    # Preserve masks if any input is masked.
    if any(np.ma.is_masked(a) for a in arrays):
        return np.ma.concatenate(arrays)
    return np.concatenate(arrays)
Concatenate 1D numpy arrays . Similar to np . concatenate but work with empty input and masked arrays .
11,753
def formula_html(self, reversed_=False):
    """Chemical formula fragment as HTML (symbol, H-count subscript, charge)."""
    if self.H_count == 1:
        hydrogen = "H"
    elif self.H_count > 1:
        hydrogen = "H<sub>{}</sub>".format(self.H_count)
    else:
        hydrogen = ""
    seq = [self.symbol, hydrogen, self.charge_sign_html()]
    if reversed_:
        seq = reversed(seq)
    return "".join(seq)
Chemical formula HTML
11,754
def charge_sign(self):
    """Charge sign text, e.g. '', '+', '2+' or a dash form for anions."""
    if self.charge == 0:
        return ""
    sign = "+" if self.charge > 0 else "–"
    magnitude = abs(self.charge)
    if magnitude > 1:
        return str(magnitude) + sign
    return sign
Charge sign text
11,755
def send_message(self, message):
    """Dispatch a message over 0mq; malformed messages are logged and dropped."""
    with self._instance_lock:
        if message is None:
            Global.LOGGER.error("can't deliver a null messages")
            return
        if message.sender is None:
            Global.LOGGER.error(f"can't deliver anonymous messages with body {message.body}")
            return
        if message.receiver is None:
            Global.LOGGER.error(
                f"can't deliver message from {message.sender}: recipient not specified")
            return
        if message.message is None:
            Global.LOGGER.error(f"can't deliver message with no body from {message.sender}")
            return
        sender = "*" + message.sender + "*"
        self.socket.send_multipart([bytes(sender, 'utf-8'), pickle.dumps(message)])
        if Global.CONFIG_MANAGER.tracing_mode:
            Global.LOGGER.debug("dispatched : "
                                + message.sender + "-"
                                + message.message + "-"
                                + message.receiver)
        self.dispatched = self.dispatched + 1
Dispatch a message using 0mq
11,756
def update_properties_cache(sender, instance, action, reverse, model, pk_set, **kwargs):
    """Property cache actualization at POI save.

    It will not work yet after property removal (only 'post_add' is handled).
    """
    if action == 'post_add':
        instance.save_properties_cache()
Property cache actualization at POI save . It will not work yet after property removal .
11,757
def to_json(self):
    """Serialize the Morse complex merge hierarchy and partitions to JSON."""
    capsule = {"Hierarchy": []}
    for dying, (persistence, surviving, saddle) in self.merge_sequence.items():
        capsule["Hierarchy"].append({
            "Persistence": persistence,
            "Dying": dying,
            "Surviving": surviving,
            "Saddle": saddle,
        })
    # One extremum label per sample.
    base = np.array([None] * len(self.Y))
    for label, items in self.base_partitions.items():
        base[items] = label
    capsule["Partitions"] = base.tolist()
    return json.dumps(capsule, separators=(",", ":"))
Writes the complete Morse complex merge hierarchy to a string object .
11,758
def iter(context, sequence, limit=10):
    """Yield all jobs events for a sequence, paging by ``limit``.

    NOTE: the name shadows the builtin ``iter``; kept for API compatibility.
    """
    params = {'limit': limit, 'offset': 0}
    uri = '%s/%s/%s' % (context.dci_cs_api, RESOURCE, sequence)
    while True:
        page = context.session.get(uri, params=params).json()['jobs_events']
        if not len(page):
            break
        for event in page:
            yield event
        params['offset'] += params['limit']
Iter to list all the jobs events .
11,759
def delete(context, sequence):
    """Delete jobs events from a given sequence; return the HTTP response."""
    uri = '%s/%s/%s' % (context.dci_cs_api, RESOURCE, sequence)
    return context.session.delete(uri)
Delete jobs events from a given sequence
11,760
def get_ldap(cls, global_options=None):
    """Return the (lazily imported and configured) ldap module.

    The unit test harness may assign a mock to ``cls.ldap``; the module must
    not be imported anywhere else. Global options are applied at most once.
    """
    if cls.ldap is None:
        import ldap.filter
        try:
            import ldap.dn
        except ImportError:
            # Older python-ldap has no ldap.dn; substitute the bundled one.
            from django_auth_ldap import dn
            ldap.dn = dn
        cls.ldap = ldap
    if (not cls._ldap_configured) and (global_options is not None):
        for opt, value in global_options.items():
            cls.ldap.set_option(opt, value)
        cls._ldap_configured = True
    return cls.ldap
Returns the ldap module . The unit test harness will assign a mock object to _LDAPConfig . ldap . It is imperative that the ldap module not be imported anywhere else so that the unit tests will pass in the absence of python - ldap .
11,761
def _begin(self, connection, filterargs=(), escape=True):
    """Begin an asynchronous search; return the message id (None on error)."""
    if escape:
        filterargs = self._escape_filterargs(filterargs)
    try:
        filterstr = self.filterstr % filterargs
        msgid = connection.search(force_str(self.base_dn),
                                  self.scope,
                                  force_str(filterstr))
    except ldap.LDAPError as e:
        msgid = None
        logger.error(u"search('%s', %d, '%s') raised %s" %
                     (self.base_dn, self.scope, filterstr, pprint.pformat(e)))
    return msgid
Begins an asynchronous search and returns the message id to retrieve the results .
11,762
def _results(self, connection, msgid):
    """Return the processed result of a previous asynchronous query."""
    try:
        kind, results = connection.result(msgid)
        if kind != ldap.RES_SEARCH_RESULT:
            results = []
    except ldap.LDAPError as e:
        results = []
        logger.error(u"result(%d) raised %s" % (msgid, pprint.pformat(e)))
    return self._process_results(results)
Returns the result of a previous asynchronous query .
11,763
def _escape_filterargs ( self , filterargs ) : if isinstance ( filterargs , tuple ) : filterargs = tuple ( self . ldap . filter . escape_filter_chars ( value ) for value in filterargs ) elif isinstance ( filterargs , dict ) : filterargs = dict ( ( key , self . ldap . filter . escape_filter_chars ( value ) ) for key , value in filterargs . items ( ) ) else : raise TypeError ( "filterargs must be a tuple or dict." ) return filterargs
Escapes values in filterargs .
11,764
def _process_results(self, results):
    """Return a sanitized copy of raw LDAP results.

    Drops referrals (None DNs), decodes utf-8 and lower-cases the DNs.
    """
    results = [r for r in results if r[0] is not None]
    results = _DeepStringCoder('utf-8').decode(results)
    results = [(r[0].lower(), r[1]) for r in results]
    result_dns = [result[0] for result in results]
    logger.debug(u"search_s('%s', %d, '%s') returned %d objects: %s" %
                 (self.base_dn, self.scope, self.filterstr,
                  len(result_dns), "; ".join(result_dns)))
    return results
Returns a sanitized copy of raw LDAP results . This scrubs out references decodes utf8 normalizes DNs etc .
11,765
def get_connection_string(params, hide_password=True):
    """Build a database connection string from a parameter dict.

    :param params: dict with 'driver' and 'database' (required) plus
        optional 'user', 'password', 'host', 'port'.
    :param hide_password: mask the password in the returned string.
    :raises ValueError: if 'database' is missing or None.
    :raises RuntimeError: if no password is available.
    """
    driver = params['driver']
    database = params.get('database', None)
    if database is None:
        raise ValueError("Field 'database' of connection parameters cannot be None.")
    user = params.get('user', None)
    password = params.get('password', None)
    host = params.get('host', None)
    port = params.get('port', None)
    if password is None and user is not None:
        # Fall back to the keyring when a user is given without a password.
        password = Client._get_password(params)
    if password is None:
        raise RuntimeError("Password not defined and not available in keyring.")
    parts = [driver, '://']
    if host is not None:
        if user is not None:
            parts.append(user)
            if len(password) > 0:
                parts.append(':[password hidden]' if hide_password
                             else ':' + password)
            parts.append('@')
        parts.append(host)
        if port is not None:
            parts.append(':' + str(port))
    parts.append('/' + database)
    return ''.join(parts)
Get a database connection string
11,766
def pubticker(self, symbol='btcusd'):
    """Request the latest public ticker info for *symbol*; return the response."""
    endpoint = self.base_url + '/v1/pubticker/' + symbol
    return requests.get(endpoint)
Send a request for latest ticker info return the response .
11,767
def book(self, symbol='btcusd', limit_bids=0, limit_asks=0):
    """Request the public order book for *symbol*; return the response."""
    query = {'limit_bids': limit_bids, 'limit_asks': limit_asks}
    return requests.get(self.base_url + '/v1/book/' + symbol, query)
Send a request to get the public order book return the response .
11,768
def trades(self, symbol='btcusd', since=0, limit_trades=50, include_breaks=0):
    """Request the public trade history for *symbol*; return the response."""
    query = {
        'since': since,
        'limit_trades': limit_trades,
        'include_breaks': include_breaks,
    }
    return requests.get(self.base_url + '/v1/trades/' + symbol, query)
Send a request to get all public trades return the response .
11,769
def auction(self, symbol='btcusd'):
    """Request the latest auction info for *symbol*; return the response."""
    endpoint = self.base_url + '/v1/auction/' + symbol
    return requests.get(endpoint)
Send a request for latest auction info return the response .
11,770
def auction_history(self, symbol='btcusd', since=0, limit_auction_results=50,
                    include_indicative=1):
    """Request auction history info for *symbol*; return the response."""
    query = {
        'since': since,
        'limit_auction_results': limit_auction_results,
        'include_indicative': include_indicative,
    }
    return requests.get(self.base_url + '/v1/auction/' + symbol + '/history',
                        query)
Send a request for auction history info return the response .
11,771
def new_order(self, amount, price, side, client_order_id=None, symbol='btcusd',
              type='exchange limit', options=None):
    """Place a new order via the authenticated API; return the response."""
    request = '/v1/order/new'
    payload = {
        'request': request,
        'nonce': self.get_nonce(),
        'symbol': symbol,
        'amount': amount,
        'price': price,
        'side': side,
        'type': type,
    }
    # Optional fields are only sent when supplied.
    if client_order_id is not None:
        payload['client_order_id'] = client_order_id
    if options is not None:
        payload['options'] = options
    return requests.post(self.base_url + request, headers=self.prepare(payload))
Send a request to place an order return the response .
11,772
def cancel_order(self, order_id):
    """Cancel an existing order by id; return the response."""
    request = '/v1/order/cancel'
    payload = {
        'request': request,
        'nonce': self.get_nonce(),
        'order_id': order_id,
    }
    return requests.post(self.base_url + request, headers=self.prepare(payload))
Send a request to cancel an order return the response .
11,773
def past_trades(self, symbol='btcusd', limit_trades=50, timestamp=0):
    """Request the authenticated account's trade history; return the response."""
    request = '/v1/mytrades'
    payload = {
        'request': request,
        'nonce': self.get_nonce(),
        'symbol': symbol,
        'limit_trades': limit_trades,
        'timestamp': timestamp,
    }
    return requests.post(self.base_url + request, headers=self.prepare(payload))
Send a trade history request return the response .
11,774
def tradevolume(self):
    """Request the authenticated account's trade volume; return the response."""
    request = '/v1/tradevolume'
    payload = {'request': request, 'nonce': self.get_nonce()}
    return requests.post(self.base_url + request, headers=self.prepare(payload))
Send a request to get your trade volume return the response .
11,775
def newAddress(self, currency='btc', label=''):
    """Request a new deposit address for *currency*, optionally labelled.

    Returns the HTTP response.
    """
    request = '/v1/deposit/' + currency + '/newAddress'
    payload = {'request': request, 'nonce': self.get_nonce()}
    # Only include the label when one was actually provided.
    if label != '':
        payload['label'] = label
    return requests.post(self.base_url + request, headers=self.prepare(payload))
Send a request for a new cryptocurrency deposit address with an optional label . Return the response .
11,776
def prepare(self, params):
    """Build the signed HTTP headers required by the private API.

    The params are JSON-encoded, base64-wrapped, and HMAC-SHA384 signed
    with the account's secret key.
    """
    payload = base64.b64encode(json.dumps(params).encode())
    signature = hmac.new(self.secret_key.encode(), payload,
                         hashlib.sha384).hexdigest()
    return {
        'X-GEMINI-APIKEY': self.api_key,
        'X-GEMINI-PAYLOAD': payload,
        'X-GEMINI-SIGNATURE': signature,
    }
Prepare return the required HTTP headers .
11,777
def merge(cls, source_blocks):
    """Merge multiple SourceBlocks into a single block.

    Blocks are ordered by their start line; the earliest block supplies
    the boot lines, directive, language, and roles of the result.
    """
    if len(source_blocks) == 1:
        return source_blocks[0]
    source_blocks.sort(key=operator.attrgetter('start_line_number'))
    main_block = source_blocks[0]
    merged_lines = [line
                    for block in source_blocks
                    for line in block.source_lines]
    return cls(main_block.boot_lines, merged_lines,
               directive=main_block.directive,
               language=main_block.language,
               roles=main_block.roles)
Merge multiple SourceBlocks together
11,778
def character_summary_table():
    """Export a table listing all characters and their data.

    Writes ``characters.csv`` and ``characters.xlsx`` into ``out_dir``.
    NOTE(review): relies on module-level ``client``, ``models``, ``sa``,
    ``path`` and ``out_dir`` — presumably SQLAlchemy models and an output
    directory; confirm against the surrounding module.
    """
    cl = client.get_client()
    session = cl.create_session()
    # Characters joined with their universe and (optional) birthplace names.
    query = session.query(
        models.Character,
        models.Universe.name.label('universe'),
        models.Place.name.label('place_of_birth')
    ).join(models.Character.universe).outerjoin(models.Character.place_of_birth)
    characters = cl.df_query(query).set_index('id')
    # Count of movie appearances per character.
    query = session.query(
        sa.func.count(models.MovieAppearance.id).label('movie_appearances'),
        models.MovieAppearance.character_id
    ).group_by(models.MovieAppearance.character_id)
    appearances = cl.df_query(query).set_index('character_id')
    # Left join keeps characters with zero appearances.
    df = characters.join(appearances, how='left').sort_values(by='name')
    df = df.drop(['universe_id', 'place_of_birth_id'], axis=1)
    df.to_csv(path.join(out_dir, "characters.csv"), encoding='utf-8', index=False)
    df.to_excel(path.join(out_dir, "characters.xlsx"), encoding='utf-8', index=False)
    session.close()
Export a table listing all characters and their data
11,779
def fig_to_svg(fig):
    """Render a matplotlib figure to an SVG string."""
    with io.StringIO() as buffer:
        fig.savefig(buffer, format='svg')
        return buffer.getvalue()
Helper function to convert matplotlib figure to SVG string
11,780
def movie_network():
    """Generate an interactive network graph of movie appearances.

    Builds a node-link JSON graph (movies as squares sized by inflation-
    adjusted budget, characters as circles) and renders it into
    ``movie_network.html`` in ``out_dir`` via a Jinja template.
    NOTE(review): relies on module-level ``jenv``, ``client``, ``models``,
    ``json``, ``path``, ``out_dir`` and ``plt`` — confirm against the
    surrounding module.
    """
    template = jenv.get_template("movie_network.html")
    context = dict()
    cl = client.get_client()
    session = cl.create_session()
    # Movies with budget and rating used for node size/score.
    query = session.query(models.Movie.id, models.Movie.name, models.Movie.url,
                          models.Movie.budget_inflation_adjusted,
                          models.Movie.imdb_rating)
    movies = cl.df_query(query)
    # Edges: which character appeared in which movie.
    query = session.query(models.MovieAppearance.movie_id,
                          models.MovieAppearance.character_id)
    appearances = cl.df_query(query)
    # Only fetch characters that actually appear in at least one movie.
    query = session.query(models.Character.id, models.Character.url,
                          models.Character.name).filter(
        models.Character.id.in_(
            [int(i) for i in appearances['character_id'].unique()]))
    characters = cl.df_query(query)
    # node-link JSON structure consumed by the template's JS renderer.
    graph = dict(nodes=[], graph=[], links=[], directed=False, multigraph=True)
    movie_node_id = dict()
    character_node_id = dict()
    # Scale node sizes so the biggest budget maps to size 100.
    movie_size_factor = 100. / movies['budget_inflation_adjusted'].max()
    for _, data in movies.iterrows():
        movie_node_id[data['id']] = len(graph['nodes'])
        graph['nodes'].append(dict(
            id=data['name'],
            # Clamp to a minimum visible size of 5.
            size=max(5., data['budget_inflation_adjusted'] * movie_size_factor),
            score=data['imdb_rating'] / 10.,
            type='square',
            url="http://marvel.wikia.com" + data['url']))
    for _, data in characters.iterrows():
        character_node_id[data['id']] = len(graph['nodes'])
        graph['nodes'].append(dict(
            id=data['name'],
            size=10,
            type='circle',
            url="http://marvel.wikia.com" + data['url']))
    # Undirected edges from movie node to character node.
    for _, data in appearances.iterrows():
        graph['links'].append(dict(
            source=movie_node_id[data['movie_id']],
            target=character_node_id[data['character_id']]))
    context['graph'] = json.dumps(graph, indent=4)
    out_file = path.join(out_dir, "movie_network.html")
    html_content = template.render(**context)
    with open(out_file, 'w') as f:
        f.write(html_content)
    plt.close('all')
    session.close()
Generate interactive network graph of movie appearances
11,781
def unpack2D(_x):
    """Split 2D data into its x (column 0) and y (column 1) components.

    1-D input is promoted to a single row before splitting.
    """
    arr = np.atleast_2d(_x)
    return arr[:, 0], arr[:, 1]
Helper function for splitting 2D data into x and y component to make equations simpler
11,782
def is_at_exit():
    """Heuristically detect whether the interpreter is shutting down.

    Compares the recorded main thread (module-level
    ``_threading_main_thread``) against the current threading state.
    """
    main = _threading_main_thread
    if main is None:
        # Nothing recorded; cannot conclude we are at exit.
        return False
    if not hasattr(threading, "main_thread"):
        return True
    if threading.main_thread() != main:
        return True
    return not main.is_alive()
Some heuristics to figure out whether this is called at a stage where the Python interpreter is shutting down .
11,783
def better_exchook(etype, value, tb, debugshell=False, autodebugshell=True,
                   file=None, with_color=None):
    """Drop-in replacement for ``sys.excepthook``.

    Prints a colored traceback with locals/globals collected via
    ``print_tb``, then the final exception line, and optionally drops
    into a debug shell (forced when the DEBUG env var is a nonzero int).
    NOTE(review): ``Color``, ``print_tb`` and ``debug_shell`` are
    module-level helpers not visible here.
    """
    if file is None:
        file = sys.stderr

    def output(ln):
        file.write(ln + "\n")

    color = Color(enable=with_color)
    output(color("EXCEPTION", color.fg_colors[1], bold=True))
    all_locals, all_globals = {}, {}
    if tb is not None:
        # print_tb fills all_locals/all_globals as a side effect; they are
        # later handed to the debug shell.
        print_tb(tb, allLocals=all_locals, allGlobals=all_globals, file=file,
                 withTitle=True, with_color=color.enable)
    else:
        output(color("better_exchook: traceback unknown", color.fg_colors[1]))
    import types

    def _some_str(value):
        # Safe str(): never let a broken __str__ break the hook itself.
        try:
            return str(value)
        except Exception:
            return '<unprintable %s object>' % type(value).__name__

    def _format_final_exc_line(etype, value):
        # Mirrors traceback._format_final_exc_line: omit ": value" when
        # the value is None or stringifies to empty.
        value_str = _some_str(value)
        if value is None or not value_str:
            line = color("%s" % etype, color.fg_colors[1])
        else:
            line = color("%s" % etype, color.fg_colors[1]) + ": %s" % (value_str,)
        return line

    # Old-style (Py2 InstanceType), string, None, or instance exceptions are
    # printed as-is; real exception classes are printed by name.
    if (isinstance(etype, BaseException)
            or (hasattr(types, "InstanceType")
                and isinstance(etype, types.InstanceType))
            or etype is None or type(etype) is str):
        output(_format_final_exc_line(etype, value))
    else:
        output(_format_final_exc_line(etype.__name__, value))
    if autodebugshell:
        try:
            # DEBUG=1 in the environment forces the debug shell.
            debugshell = int(os.environ["DEBUG"]) != 0
        except Exception:
            pass
    if debugshell:
        output("---------- DEBUG SHELL -----------")
        debug_shell(user_ns=all_locals, user_global_ns=all_globals, traceback=tb)
    file.flush()
Replacement for sys . excepthook .
11,784
def dump_all_thread_tracebacks(exclude_thread_ids=None, file=None):
    """Print the traceback of every live thread to *file* (default stdout).

    Threads listed in *exclude_thread_ids* are skipped. Requires the
    CPython-specific ``sys._current_frames``.
    NOTE(review): ``print_tb`` is a module-level helper not visible here.
    """
    if exclude_thread_ids is None:
        exclude_thread_ids = []
    if not file:
        file = sys.stdout
    import threading
    if hasattr(sys, "_current_frames"):
        print("", file=file)
        # Map thread id -> Thread object so we can attach readable tags.
        threads = {t.ident: t for t in threading.enumerate()}
        for tid, stack in sys._current_frames().items():
            if tid in exclude_thread_ids:
                continue
            # Frames can belong to threads that already finished; skip them.
            if tid not in threads:
                continue
            tags = []
            thread = threads.get(tid)
            if thread:
                assert isinstance(thread, threading.Thread)
                if thread is threading.currentThread():
                    tags += ["current"]
                if isinstance(thread, threading._MainThread):
                    tags += ["main"]
                tags += [str(thread)]
            else:
                tags += ["unknown with id %i" % tid]
            print("Thread %s:" % ", ".join(tags), file=file)
            print_tb(stack, file=file)
            print("", file=file)
        print("That were all threads.", file=file)
    else:
        print("Does not have sys._current_frames, cannot get thread tracebacks.",
              file=file)
Prints the traceback of all threads .
11,785
def _main():
    """Demo / self-test entry point.

    With argv "test" runs all module-level ``test_*`` functions; with
    "debug_shell"/"debug_shell_exception" exercises the debug shell.
    Without arguments it deliberately triggers several exceptions to
    showcase ``better_exchook`` output, then installs the hook and
    triggers a final failure.
    """
    if sys.argv[1:] == ["test"]:
        for k, v in sorted(globals().items()):
            if not k.startswith("test_"):
                continue
            print("running: %s()" % k)
            v()
        print("ok.")
        sys.exit()
    elif sys.argv[1:] == ["debug_shell"]:
        debug_shell(locals(), globals())
        sys.exit()
    elif sys.argv[1:] == ["debug_shell_exception"]:
        try:
            raise Exception("demo exception")
        except Exception:
            better_exchook(*sys.exc_info(), debugshell=True)
        sys.exit()
    elif sys.argv[1:]:
        print("Usage: %s (test|...)" % sys.argv[0])
        sys.exit(1)
    # Demo 1: NameError on an undefined name (z) inside a nested function.
    try:
        x = {1: 2, "a": "b"}

        def f():
            y = "foo"
            # Intentional: z is undefined, so this raises NameError.
            x, 42, sys.stdin.__class__, sys.exc_info, y, z
        f()
    except Exception:
        better_exchook(*sys.exc_info())
    # Demo 2: TypeError from calling a 1-arg lambda with 2 args.
    try:
        (lambda _x: None)(__name__, 42)
    except Exception:
        better_exchook(*sys.exc_info())
    # Demo 3: AssertionError with a multi-line repr in the traceback.
    try:
        class Obj:
            def __repr__(self):
                return (
                    "<Obj multi-\n" +
                    " line repr>")
        obj = Obj()
        assert not obj
    except Exception:
        better_exchook(*sys.exc_info())

    # Demo 4: TypeError deep in a small call chain (a is an int, not callable).
    def f1(a):
        f2(a + 1, 2)

    def f2(a, b):
        f3(a + b)

    def f3(a):
        b = ("abc" * 100) + "-interesting"
        a(b)
    try:
        f1(13)
    except Exception:
        better_exchook(*sys.exc_info())
    # Install the hook and trigger one last, unhandled failure.
    install()
    finalfail(sys)
Some demo .
11,786
def verify_mid_signature(certificate_data, sp_challenge, response_challenge,
                         signature):
    """Verify that a Mobile-ID authentication signature is valid.

    Returns False if the response challenge does not start with the
    service-provider challenge, or if signature verification fails.
    NOTE(review): assumes RSA keys import via ``RSA.importKey`` and fall
    back to ECC/DSS on ValueError — confirm both code paths against the
    crypto library in use; ``PrehashedMessageData`` is a project helper
    presumably wrapping an already-hashed message.
    """
    # The signed challenge must embed the SP's original challenge.
    if not response_challenge.startswith(sp_challenge):
        return False
    try:
        key = RSA.importKey(certificate_data)
        verifier = PKCS1_v1_5.new(key)
    except ValueError:
        # Not an RSA certificate; try an elliptic-curve key instead.
        key = ECC.import_key(certificate_data)
        verifier = DSS.new(key, 'deterministic-rfc6979')
    digest = PrehashedMessageData(response_challenge)
    try:
        verifier.verify(digest, signature)
        return True
    except ValueError:
        # Verification failure is reported as ValueError by the verifier.
        return False
Verify mobile id Authentication signature is valid
11,787
def drive(self, event, *args):
    """Dispatch *event* to its registered handlers, then to the pool.

    Control-flow exceptions raised by handlers:
    - Stop: abort dispatching the remaining handlers for this event.
    - StopIteration: ignored; continue with the next handler.
    - Kill: propagated to the caller.
    - Erase: unregister the raising (handle, data) mapping.
    - any other Exception: logged via ``debug`` and dispatch continues.
    """
    maps = self.base.get(event, self.step)
    # Iterate over a copy so handlers may remove themselves (Erase) safely.
    for handle, data in maps[:]:
        params = args + data
        try:
            handle(self, *params)
        except Stop:
            break
        except StopIteration:
            pass
        except Kill as Root:  # NOTE(review): alias unused; kept verbatim.
            raise
        except Erase:
            maps.remove((handle, data))
        except Exception as e:
            debug(event, params)
    # Pool handlers observe every event regardless of the outcome above.
    for handle in self.pool:
        handle(self, event, args)
Used to dispatch events .
11,788
def send(self, data):
    """Write *data* to the child process's stdin and flush immediately."""
    stream = self.stdin
    stream.write(data)
    stream.flush()
Send data to the child process through .
11,789
def _simplify_arguments(arguments):
    """Collapse an arguments bundle to just its non-empty half.

    Returns only the kwargs when args is empty, only the args when
    kwargs is empty, and the full bundle otherwise.
    """
    if not arguments.args:
        return arguments.kwargs
    if not arguments.kwargs:
        return arguments.args
    return arguments
If positional or keyword arguments are empty return only one or the other .
11,790
def load(self):
    """Load this step's result from its dump directory.

    Prefers the HDF5 dump (``result.h5``): a single '/df' key loads as a
    DataFrame, keys '/0'..'/N-1' load as a list, anything else as a dict
    keyed by the key name without its leading '/'. Falls back to the
    joblib pickle when no HDF5 file exists.
    """
    hdf_filename = os.path.join(self._dump_dirname, 'result.h5')
    if os.path.isfile(hdf_filename):
        # Use a context manager so the store is always closed; the previous
        # implementation leaked the open HDFStore handle.
        with pd.HDFStore(hdf_filename, mode='r') as store:
            keys = store.keys()
            if keys == ['/df']:
                self.result = store['df']
            elif set(keys) == set('/%s' % i for i in range(len(keys))):
                # Consecutive integer keys encode an ordered list of frames.
                self.result = [store[str(k)] for k in range(len(keys))]
            else:
                self.result = {k[1:]: store[k] for k in keys}
    else:
        self.result = joblib.load(
            os.path.join(self._output_dirname, 'dump', 'result.pkl'))
Load this step s result from its dump directory
11,791
def setup_dump(self):
    """Set up the dump directory and write ``step.yaml``.

    The yaml file is (re)written when missing or when its contents no
    longer match the current serialization of this step.
    """
    dumpdir = self._dump_dirname
    if not os.path.isdir(dumpdir):
        os.makedirs(dumpdir)
    yaml_filename = self._yaml_filename
    needs_dump = True
    if os.path.isfile(yaml_filename):
        with open(yaml_filename) as f:
            needs_dump = f.read() != yaml.dump(self)
        if needs_dump:
            logging.warning('Existing step.yaml does not match hash, regenerating')
    if needs_dump:
        with open(yaml_filename, 'w') as f:
            yaml.dump(self, f)
Set up the dump: create directories and write a step.yaml file containing the yaml dump of this step.
11,792
def main(ctx, root_dir, verbose):
    """package-docs CLI entry point for single-package doc previews.

    Resolves the package documentation directory, stores shared options
    on the click context, and configures the 'documenteer' logger.
    """
    root_dir = discover_package_doc_dir(root_dir)
    ctx.obj = {'root_dir': root_dir, 'verbose': verbose}
    level = logging.DEBUG if verbose else logging.INFO
    logger = logging.getLogger('documenteer')
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(level)
package - docs is a CLI for building single - package previews of documentation in the LSST Stack .
11,793
def apply_and_name(self, aggregator):
    """Fetch the row-aggregated columns and rename them with self.names.

    Raises IndexError when the produced column count does not match the
    number of configured names.
    """
    frame = self._apply(aggregator)
    if len(frame.columns) != len(self.names):
        raise IndexError("ColumnFunction creates more columns than it has names for.")
    frame.columns = self.names
    return frame
Fetches the row - aggregated input columns for this ColumnFunction .
11,794
def aggregate(self, index):
    """Group the unique columns by *index* (built from self.df) and reduce.

    *index* is either a single column name or an iterable of names for a
    MultiIndex groupby. Stores the per-reduction frame on
    ``self.reduced_df`` and returns the concatenated column-function
    results.
    """
    if isinstance(index, string_types):
        col_df_grouped = self.col_df.groupby(self.df[index])
    else:
        # Temporarily re-index col_df so we can group on multiple levels...
        self.col_df.index = pd.MultiIndex.from_arrays(
            [self.df[i] for i in index])
        col_df_grouped = self.col_df.groupby(level=index)
        # ...then restore the original index (groupby keeps its own view).
        self.col_df.index = self.df.index
    # One aggregated series per configured column reduction, keyed by the
    # reduction object itself.
    self.reduced_df = pd.DataFrame(
        {colred: col_df_grouped[colred.column].agg(colred.agg_func)
         for colred in self.column_reductions})
    reduced_dfs = []
    for cf in self.column_functions:
        reduced_dfs.append(cf.apply_and_name(self))
    return pd.concat(reduced_dfs, axis=1)
Performs a groupby of the unique Columns by index as constructed from self . df .
11,795
def _apply(self, aggregator):
    """Assemble the requested reductions into a single DataFrame.

    Depending on the include_* flags, emits every numerator/denominator
    column ratio, the numerator columns, and/or the denominator columns.
    """
    pieces = []
    if self.include_fraction:
        num = self.numerator.apply_and_name(aggregator)
        den = self.denominator.apply_and_name(aggregator)
        for cn, cd in product(num.columns, den.columns):
            pieces.append(num[cn] / den[cd])
    if self.include_numerator:
        pieces.append(self.numerator.apply_and_name(aggregator))
    if self.include_denominator:
        pieces.append(self.denominator.apply_and_name(aggregator))
    return pd.concat(pieces, axis=1)
Returns a dataframe with the requested ColumnReductions .
11,796
def clone(self, **kwargs):
    """Create a copy of the Table, optionally overriding some properties.

    Any keyword argument replaces the corresponding constructor argument
    of the clone.
    """
    settings = dict(
        name=self.__name,
        dataframe=self.__df,
        include_columns=self.__include_columns,
        include_index=self.__include_index,
        style=self.__style,
        column_styles=self.__col_styles,
        column_widths=self.__column_widths,
        row_styles=self.__row_styles,
        header_style=self.header_style,
        index_style=self.index_style,
    )
    settings.update(kwargs)
    return self.__class__(**settings)
Create a clone of the Table optionally with some properties changed
11,797
def inspect(lines):
    """Scan SDFile lines; return ([property labels], molecule count).

    A molecule starts counting once its "M END" line is seen and is
    closed by a "$$$$" delimiter; a trailing molecule without the final
    delimiter is still counted.
    """
    tag_pattern = re.compile(r">.*?<([\w ]+)>")
    labels = set()
    count = 0
    in_molecule = False
    for line in lines:
        if line.startswith("M END\n"):
            in_molecule = True
        elif line.startswith("$$$$"):
            count += 1
            in_molecule = False
        else:
            match = tag_pattern.match(line)
            if match:
                labels.add(match.group(1))
    if in_molecule:
        count += 1
    return list(labels), count
Inspect SDFile list of string
11,798
def inspect_file(path):
    """Open the SDFile at *path* and inspect its structure.

    Lines are read as bytes and decoded via ``tx.decode`` before being
    handed to ``inspect``; the generator is fully consumed while the
    file is still open.
    """
    with open(path, 'rb') as f:
        labels, count = inspect(tx.decode(raw) for raw in f)
    return labels, count
Inspect SDFile structure
11,799
def optional_data(lines):
    """Parse the data (property) section of an SDFile record into a dict.

    Each ``> <Tag>`` header line maps to the line that follows it.
    A header with no following line (i.e. on the last line) is ignored;
    previously this raised IndexError.
    """
    data = {}
    tag_pattern = re.compile(r">.*?<([\w ]+)>")
    for i, line in enumerate(lines):
        match = tag_pattern.match(line)
        # Guard i + 1: a tag on the final line has no value to read.
        if match and i + 1 < len(lines):
            data[match.group(1)] = lines[i + 1]
    return data
Parse SDFile data part into dict