idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
11,600 | def is_open ( self , id , time , day ) : details = self . get_details ( id ) has_data = False for obj in details [ "objects" ] : hours = obj [ "open_hours" ] [ day ] if hours : has_data = True for interval in hours : interval = interval . replace ( ' ' , '' ) . split ( '-' ) open_time = interval [ 0 ] close_time = inte... | Checks if the venue is open at the time of day given a venue id . |
11,601 | def search ( self , name = None , category = None , description = None , price = None , price__gt = None , price__gte = None , price__lt = None , price__lte = None , location = ( None , None ) , radius = None , tl_coord = ( None , None ) , br_coord = ( None , None ) , country = None , locality = None , region = None , ... | Locu Menu Item Search API Call Wrapper |
11,602 | def main ( ctx , root_project_dir , verbose ) : root_project_dir = discover_conf_py_directory ( root_project_dir ) ctx . obj = { 'root_project_dir' : root_project_dir , 'verbose' : verbose } if verbose : log_level = logging . DEBUG else : log_level = logging . INFO logger = logging . getLogger ( 'documenteer' ) logger ... | stack - docs is a CLI for building LSST Stack documentation such as pipelines . lsst . io . |
11,603 | def help ( ctx , topic , ** kw ) : if topic is None : click . echo ( ctx . parent . get_help ( ) ) else : click . echo ( main . commands [ topic ] . get_help ( ctx ) ) | Show help for any command . |
11,604 | def clean ( ctx ) : logger = logging . getLogger ( __name__ ) dirnames = [ 'py-api' , '_build' , 'modules' , 'packages' ] dirnames = [ os . path . join ( ctx . obj [ 'root_project_dir' ] , dirname ) for dirname in dirnames ] for dirname in dirnames : if os . path . isdir ( dirname ) : shutil . rmtree ( dirname ) logger... | Clean Sphinx build products . |
11,605 | def query_with_attributes ( type_to_query , client ) : session = client . create_session ( ) query = session . query ( Attribute . name , Attribute . value , Entity . id ) . join ( Entity ) . filter ( Entity . type == type_to_query ) df = client . df_query ( query ) session . close ( ) df = df . dropna ( how = 'any' ) ... | Query all entities of a specific type with their attributes |
11,606 | def reset ( self ) : for name in self . __dict__ : if name . startswith ( "_" ) : continue attr = getattr ( self , name ) setattr ( self , name , attr and attr . __class__ ( ) ) | Reset all fields of this object to class defaults |
11,607 | def geojson_polygon_to_mask ( feature , shape , lat_idx , lon_idx ) : import matplotlib matplotlib . use ( 'agg' ) import matplotlib . pyplot as plt from matplotlib import patches import numpy as np if feature . geometry . type not in ( 'Polygon' , 'MultiPolygon' ) : raise ValueError ( "Cannot handle feature of type " ... | Convert a GeoJSON polygon feature to a numpy array |
11,608 | def load ( self ) : df = pd . read_csv ( self . input_file , sep = ',' , quotechar = '"' , encoding = 'utf-8' , dtype = object ) df = df [ [ 'NUTS-Code' , 'Description' ] ] df . columns = [ 'key' , 'name' ] df = df [ df [ 'key' ] . str . len ( ) == 4 ] df = df [ df [ 'key' ] . str [ 2 : ] != 'ZZ' ] return df | Load data from default location |
11,609 | def input_file ( self ) : return path . join ( path . dirname ( __file__ ) , 'data' , 'tgs{:s}.tsv' . format ( self . number ) ) | Returns the input file name with a default relative path |
11,610 | def load ( self , key_filter = None , header_preproc = None ) : df = pd . read_csv ( self . input_file , sep = '\t' , dtype = object ) if key_filter is not None : df = df [ df [ df . columns [ 0 ] ] . str . match ( key_filter ) ] meta_col = df . columns [ 0 ] df [ meta_col ] = df [ meta_col ] . str . split ( ',' ) . st... | Load data table from tsv file from default location |
11,611 | def load ( self ) : from scipy . io import netcdf_file from scipy import interpolate import numpy as np f = netcdf_file ( self . input_file ) out = dict ( ) lats = f . variables [ 'lat' ] [ : ] . copy ( ) lons = f . variables [ 'lon' ] [ : ] . copy ( ) out [ 'data' ] = np . roll ( f . variables [ self . variable_name ]... | Load the climate data as a map |
11,612 | def clear ( self ) : self . mark_incomplete ( ) for suffix in list ( CLIMATE_SEASON_SUFFIXES . values ( ) ) : try : indicator = self . session . query ( models . ClimateIndicator ) . filter ( models . ClimateIndicator . description == self . description + suffix ) . one ( ) self . session . delete ( indicator ) except ... | Clear output of one climate variable |
11,613 | def run ( self ) : import numpy as np query = self . session . query ( models . NUTS2Region . key , models . NUTS2Region . id ) region_ids = self . client . df_query ( query ) . set_index ( 'key' ) [ 'id' ] . to_dict ( ) data = next ( self . requires ( ) ) . load ( ) nuts = NUTS2GeoJSONInputFile ( ) . load ( ) indicato... | Load climate data and convert to indicator objects |
11,614 | def lose ( spin ) : try : spin . close ( ) except Exception as excpt : err = excpt . args [ 0 ] spin . drive ( CLOSE_ERR , err ) finally : spin . destroy ( ) spin . drive ( LOST ) | It is used to close TCP connection and unregister the Spin instance from untwisted reactor . |
11,615 | def create_server ( addr , port , backlog ) : server = Spin ( ) server . bind ( ( addr , port ) ) server . listen ( backlog ) Server ( server ) server . add_map ( ACCEPT , lambda server , spin : install_basic_handles ( spin ) ) return server | Set up a TCP server and installs the basic handles Stdin Stdout in the clients . |
11,616 | def create_client ( addr , port ) : sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) sock . connect_ex ( ( addr , port ) ) spin = Spin ( sock ) Client ( spin ) spin . add_map ( CONNECT , install_basic_handles ) spin . add_map ( CONNECT_ERR , lambda con , err : lose ( con ) ) return spin | Set up a TCP client and installs the basic handles Stdin Stdout . |
11,617 | def main ( argv = None ) : app = application . Application ( ) app . run ( argv ) app . exit ( ) | Execute the main bit of the application . |
11,618 | def fingerprint_similarity ( mol1 , mol2 ) : idmol1 = to_real_mol ( mol1 ) idmol2 = to_real_mol ( mol2 ) fp1 = idmol1 . fingerprint ( "sim" ) fp2 = idmol2 . fingerprint ( "sim" ) return round ( idg . similarity ( fp1 , fp2 , "tanimoto" ) , 2 ) | Calculate Indigo fingerprint similarity |
11,619 | def devmodel_to_array ( model_name , train_fraction = 1 ) : model_outputs = - 6 + model_name . Data_summary . shape [ 0 ] devmodel = model_name rawdf = devmodel . Data rawdf = rawdf . sample ( frac = 1 ) datadf = rawdf . select_dtypes ( include = [ np . number ] ) data = np . array ( datadf ) n = data . shape [ 0 ] d =... | a standardized method of turning a dev_model object into training and testing arrays |
11,620 | def dapply ( self , fn , pairwise = False , symmetric = True , diagonal = False , block = None , ** kwargs ) : search_keys = [ k for k , v in kwargs . items ( ) if isinstance ( v , list ) and len ( v ) > 1 ] functions = util . make_list ( fn ) search = list ( product ( functions , util . dict_product ( kwargs ) ) ) res... | Apply function to each step object in the index |
11,621 | def _identifyBranches ( self ) : if self . debug : sys . stdout . write ( "Identifying branches: " ) start = time . clock ( ) seen = set ( ) self . branches = set ( ) for e1 , e2 in self . edges : if e1 not in seen : seen . add ( e1 ) else : self . branches . add ( e1 ) if e2 not in seen : seen . add ( e2 ) else : self... | A helper function for determining all of the branches in the tree . This should be called after the tree has been fully constructed and its nodes and edges are populated . |
11,622 | def _identifySuperGraph ( self ) : if self . debug : sys . stdout . write ( "Condensing Graph: " ) start = time . clock ( ) G = nx . DiGraph ( ) G . add_edges_from ( self . edges ) if self . short_circuit : self . superNodes = G . nodes ( ) self . superArcs = G . edges ( ) return self . augmentedEdges = { } N = len ( s... | A helper function for determining the condensed representation of the tree . That is one that does not hold all of the internal nodes of the graph . The results will be stored in ContourTree . superNodes and ContourTree . superArcs . These two can be used to potentially speed up queries by limiting the searching on the... |
11,623 | def get_seeds ( self , threshold ) : seeds = [ ] for e1 , e2 in self . superArcs : if self . Y [ e1 ] <= threshold <= self . Y [ e2 ] : if ( e1 , e2 ) in self . augmentedEdges : edgeList = self . augmentedEdges [ ( e1 , e2 ) ] elif ( e2 , e1 ) in self . augmentedEdges : e1 , e2 = e2 , e1 edgeList = list ( reversed ( se... | Returns a list of seed points for isosurface extraction given a threshold value |
11,624 | def _construct_nx_tree ( self , thisTree , thatTree = None ) : if self . debug : sys . stdout . write ( "Networkx Tree construction: " ) start = time . clock ( ) nxTree = nx . DiGraph ( ) nxTree . add_edges_from ( thisTree . edges ) nodesOfThatTree = [ ] if thatTree is not None : nodesOfThatTree = thatTree . nodes . ke... | A function for creating networkx instances that can be used more efficiently for graph manipulation than the MergeTree class . |
11,625 | def _process_tree ( self , thisTree , thatTree ) : if self . debug : sys . stdout . write ( "Processing Tree: " ) start = time . clock ( ) if len ( thisTree . nodes ( ) ) > 1 : leaves = set ( [ v for v in thisTree . nodes ( ) if thisTree . in_degree ( v ) == 0 and thatTree . in_degree ( v ) < 2 ] ) else : leaves = set ... | A function that will process either a split or join tree with reference to the other tree and store it as part of this CT instance . |
11,626 | def read_git_branch ( ) : if os . getenv ( 'TRAVIS' ) : return os . getenv ( 'TRAVIS_BRANCH' ) else : try : repo = git . repo . base . Repo ( search_parent_directories = True ) return repo . active_branch . name except Exception : return '' | Obtain the current branch name from the Git repository . If on Travis CI use the TRAVIS_BRANCH environment variable . |
11,627 | def read_git_commit_timestamp ( repo_path = None ) : repo = git . repo . base . Repo ( path = repo_path , search_parent_directories = True ) head_commit = repo . head . commit return head_commit . committed_datetime | Obtain the timestamp from the current head commit of a Git repository . |
11,628 | def read_git_commit_timestamp_for_file ( filepath , repo_path = None ) : repo = git . repo . base . Repo ( path = repo_path , search_parent_directories = True ) head_commit = repo . head . commit for commit in head_commit . iter_parents ( filepath ) : return commit . committed_datetime raise IOError ( 'File {} not foun... | Obtain the timestamp for the most recent commit to a given file in a Git repository . |
11,629 | def get_filepaths_with_extension ( extname , root_dir = '.' ) : if not extname . startswith ( '.' ) : extname = '.' + extname extname = extname . lower ( ) root_dir = os . path . abspath ( root_dir ) selected_filenames = [ ] for dirname , sub_dirnames , filenames in os . walk ( root_dir ) : for filename in filenames : ... | Get relative filepaths of files in a directory and sub - directories with the given extension . |
11,630 | def get_project_content_commit_date ( root_dir = '.' , exclusions = None ) : logger = logging . getLogger ( __name__ ) extensions = ( 'rst' , 'ipynb' , 'png' , 'jpeg' , 'jpg' , 'svg' , 'gif' ) content_paths = [ ] for extname in extensions : content_paths += get_filepaths_with_extension ( extname , root_dir = root_dir )... | Get the datetime for the most recent commit to a project that affected Sphinx content . |
11,631 | def form_ltd_edition_name ( git_ref_name = None ) : if git_ref_name is None : name = read_git_branch ( ) else : name = git_ref_name m = TICKET_BRANCH_PATTERN . match ( name ) if m is not None : return m . group ( 1 ) m = TAG_PATTERN . match ( name ) if m is not None : return name if name == 'master' : name = 'Current' ... | Form the LSST the Docs edition name for this branch using the same logic as LTD Keeper does for transforming branch names into edition names . |
11,632 | def itersheets ( self ) : for ws in self . worksheets : prev_ws = self . active_worksheet self . active_worksheet = ws try : yield ws finally : self . active_worksheet = prev_ws | Iterates over the worksheets in the book and sets the active worksheet as the current one before yielding . |
11,633 | def to_xlsx ( self , ** kwargs ) : from xlsxwriter . workbook import Workbook as _Workbook self . workbook_obj = _Workbook ( ** kwargs ) self . workbook_obj . set_calc_mode ( self . calc_mode ) for worksheet in self . itersheets ( ) : worksheet . to_xlsx ( workbook = self ) self . workbook_obj . filename = self . filen... | Write workbook to a . xlsx file using xlsxwriter . Return a xlsxwriter . workbook . Workbook . |
11,634 | def get_table ( self , name ) : if name is None : assert self . active_table , "Can't get table without name unless an active table is set" name = self . active_table . name if self . active_worksheet : table = self . active_worksheet . get_table ( name ) assert table is self . active_table , "Active table is not from ... | Return a table worksheet pair for the named table |
11,635 | def send_message ( self , output ) : file_system_event = None if self . my_action_input : file_system_event = self . my_action_input . file_system_event or None output_action = ActionInput ( file_system_event , output , self . name , "*" ) Global . MESSAGE_DISPATCHER . send_message ( output_action ) | Send a message to the socket |
11,636 | def stop ( self ) : Global . LOGGER . debug ( f"action {self.name} stopped" ) self . is_running = False self . on_stop ( ) | Stop the current action |
11,637 | def run ( self ) : Global . LOGGER . debug ( f"action {self.name} is running" ) for tmp_monitored_input in self . monitored_input : sender = "*" + tmp_monitored_input + "*" Global . LOGGER . debug ( f"action {self.name} is monitoring {sender}" ) while self . is_running : try : time . sleep ( Global . CONFIG_MANAGER . s... | Start the action |
11,638 | def create_action_for_code ( cls , action_code , name , configuration , managed_input ) : Global . LOGGER . debug ( f"creating action {name} for code {action_code}" ) Global . LOGGER . debug ( f"configuration length: {len(configuration)}" ) Global . LOGGER . debug ( f"input: {managed_input}" ) my_actions_file = Action ... | Factory method to create an instance of an Action from an input code |
11,639 | def extract_class ( jar , name ) : with jar . open ( name ) as entry : return LinkableClass ( javatools . unpack_class ( entry ) ) | Extracts a LinkableClass from a jar . |
11,640 | def _format_summary_node ( self , task_class ) : modulename = task_class . __module__ classname = task_class . __name__ nodes = [ ] nodes . append ( self . _format_class_nodes ( task_class ) ) nodes . append ( self . _format_config_nodes ( modulename , classname ) ) methods = ( 'run' , 'runDataRef' ) for method in meth... | Format a section node containing a summary of a Task class s key APIs . |
11,641 | def _format_class_nodes ( self , task_class ) : modulename = task_class . __module__ classname = task_class . __name__ fullname = '.' . join ( ( modulename , classname ) ) signature = Signature ( task_class , bound_method = False ) desc_sig_node = self . _format_signature ( signature , modulename , classname , fullname... | Create a desc node summarizing the class docstring . |
11,642 | def _format_method_nodes ( self , task_method , modulename , classname ) : methodname = task_method . __name__ fullname = '.' . join ( ( modulename , classname , methodname ) ) signature = Signature ( task_method , bound_method = True ) desc_sig_node = self . _format_signature ( signature , modulename , classname , ful... | Create a desc node summarizing a method docstring . |
11,643 | def _create_doc_summary ( self , obj , fullname , refrole ) : summary_text = extract_docstring_summary ( get_docstring ( obj ) ) summary_text = summary_text . strip ( ) if summary_text . endswith ( '.' ) : summary_text = summary_text . rstrip ( '.' ) content_node_p = nodes . paragraph ( text = summary_text ) content_no... | Create a paragraph containing the object s one - sentence docstring summary with a link to further documentation . |
11,644 | def _create_api_details_link ( self , fullname , refrole ) : ref_text = '... <{}>' . format ( fullname ) xref = PyXRefRole ( ) xref_nodes , _ = xref ( refrole , ref_text , ref_text , self . lineno , self . state . inliner ) return xref_nodes | Appends a link to the API docs labelled as ... that is appended to the content paragraph of an API description . |
11,645 | def _format_config_nodes ( self , modulename , classname ) : fullname = '{0}.{1}.config' . format ( modulename , classname ) desc_sig_node = desc_signature ( ) desc_sig_node [ 'module' ] = modulename desc_sig_node [ 'class' ] = classname desc_sig_node [ 'fullname' ] = fullname prefix = 'attribute' desc_sig_node += desc... | Create a desc node summarizing the config attribute |
11,646 | def _format_import_example ( self , task_class ) : code = 'from {0.__module__} import {0.__name__}' . format ( task_class ) literal_node = nodes . literal_block ( code , code ) literal_node [ 'language' ] = 'py' return [ literal_node ] | Generate nodes that show a code sample demonstrating how to import the task class . |
11,647 | def _format_api_docs_link_message ( self , task_class ) : fullname = '{0.__module__}.{0.__name__}' . format ( task_class ) p_node = nodes . paragraph ( ) _ = 'See the ' p_node += nodes . Text ( _ , _ ) xref = PyXRefRole ( ) xref_nodes , _ = xref ( 'py:class' , '~' + fullname , '~' + fullname , self . lineno , self . st... | Format a message referring the reader to the full API docs . |
11,648 | def send_exception ( self ) : self . compiler . reset ( ) exc = traceback . format_exc ( ) self . writer . write ( exc . encode ( 'utf8' ) ) yield from self . writer . drain ( ) | When an exception has occurred write the traceback to the user . |
11,649 | def handle_one_command ( self ) : while True : yield from self . write_prompt ( ) codeobj = yield from self . read_command ( ) if codeobj is not None : yield from self . run_command ( codeobj ) | Process a single command . May have many lines . |
11,650 | def run_command ( self , codeobj ) : try : value , stdout = yield from self . attempt_exec ( codeobj , self . namespace ) except Exception : yield from self . send_exception ( ) return else : yield from self . send_output ( value , stdout ) | Execute a compiled code object and write the output back to the client . |
11,651 | def read_command ( self ) : reader = self . reader line = yield from reader . readline ( ) if line == b'' : raise ConnectionResetError ( ) try : codeobj = self . attempt_compile ( line . rstrip ( b'\n' ) ) except SyntaxError : yield from self . send_exception ( ) return return codeobj | Read a command from the user line by line . |
11,652 | def send_output ( self , value , stdout ) : writer = self . writer if value is not None : writer . write ( '{!r}\n' . format ( value ) . encode ( 'utf8' ) ) if stdout : writer . write ( stdout . encode ( 'utf8' ) ) yield from writer . drain ( ) | Write the output or value of the expression back to user . |
11,653 | def call ( self , method , * args ) : try : response = getattr ( self . client . service , method ) ( * args ) except ( URLError , SSLError ) as e : log . exception ( 'Failed to connect to responsys service' ) raise ConnectError ( "Request to service timed out" ) except WebFault as web_fault : fault_name = getattr ( we... | Calls the service method defined with the arguments provided |
11,654 | def connect ( self ) : if self . session and self . session . is_expired : self . disconnect ( abandon_session = True ) if not self . session : try : login_result = self . login ( self . username , self . password ) except AccountFault : log . error ( 'Login failed, invalid username or password' ) raise else : self . s... | Connects to the Responsys soap service |
11,655 | def disconnect ( self , abandon_session = False ) : self . connected = False if ( self . session and self . session . is_expired ) or abandon_session : try : self . logout ( ) except : log . warning ( 'Logout call to responsys failed, session may have not been terminated' , exc_info = True ) del self . session return T... | Disconnects from the Responsys soap service |
11,656 | def merge_list_members ( self , list_ , record_data , merge_rule ) : list_ = list_ . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) merge_rule = merge_rule . get_soap_object ( self . client ) return MergeResult ( self . call ( 'mergeListMembers' , list_ , record_data , m... | Responsys . mergeListMembers call |
11,657 | def merge_list_members_RIID ( self , list_ , record_data , merge_rule ) : list_ = list_ . get_soap_object ( self . client ) result = self . call ( 'mergeListMembersRIID' , list_ , record_data , merge_rule ) return RecipientResult ( result . recipientResult ) | Responsys . mergeListMembersRIID call |
11,658 | def delete_list_members ( self , list_ , query_column , ids_to_delete ) : list_ = list_ . get_soap_object ( self . client ) result = self . call ( 'deleteListMembers' , list_ , query_column , ids_to_delete ) if hasattr ( result , '__iter__' ) : return [ DeleteResult ( delete_result ) for delete_result in result ] retur... | Responsys . deleteListMembers call |
11,659 | def retrieve_list_members ( self , list_ , query_column , field_list , ids_to_retrieve ) : list_ = list_ . get_soap_object ( self . client ) result = self . call ( 'retrieveListMembers' , list_ , query_column , field_list , ids_to_retrieve ) return RecordData . from_soap_type ( result . recordData ) | Responsys . retrieveListMembers call |
11,660 | def create_table ( self , table , fields ) : table = table . get_soap_object ( self . client ) return self . call ( 'createTable' , table , fields ) | Responsys . createTable call |
11,661 | def create_table_with_pk ( self , table , fields , primary_keys ) : table = table . get_soap_object ( self . client ) return self . call ( 'createTableWithPK' , table , fields , primary_keys ) | Responsys . createTableWithPK call |
11,662 | def delete_table ( self , table ) : table = table . get_soap_object ( self . client ) return self . call ( 'deleteTable' , table ) | Responsys . deleteTable call |
11,663 | def delete_profile_extension_members ( self , profile_extension , query_column , ids_to_delete ) : profile_extension = profile_extension . get_soap_object ( self . client ) result = self . call ( 'deleteProfileExtensionMembers' , profile_extension , query_column , ids_to_delete ) if hasattr ( result , '__iter__' ) : re... | Responsys . deleteProfileExtensionRecords call |
11,664 | def retrieve_profile_extension_records ( self , profile_extension , field_list , ids_to_retrieve , query_column = 'RIID' ) : profile_extension = profile_extension . get_soap_object ( self . client ) return RecordData . from_soap_type ( self . call ( 'retrieveProfileExtensionRecords' , profile_extension , query_column ,... | Responsys . retrieveProfileExtensionRecords call |
11,665 | def truncate_table ( self , table ) : table = table . get_soap_object ( self . client ) return self . call ( 'truncateTable' , table ) | Responsys . truncateTable call |
11,666 | def delete_table_records ( self , table , query_column , ids_to_delete ) : table = table . get_soap_object ( self . client ) result = self . call ( 'deleteTableRecords' , table , query_column , ids_to_delete ) if hasattr ( result , '__iter__' ) : return [ DeleteResult ( delete_result ) for delete_result in result ] ret... | Responsys . deleteTableRecords call |
11,667 | def merge_table_records ( self , table , record_data , match_column_names ) : table = table . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) return MergeResult ( self . call ( 'mergeTableRecords' , table , record_data , match_column_names ) ) | Responsys . mergeTableRecords call |
11,668 | def merge_table_records_with_pk ( self , table , record_data , insert_on_no_match , update_on_match ) : table = table . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) return MergeResult ( self . call ( 'mergeTableRecordsWithPK' , table , record_data , insert_on_no_match ... | Responsys . mergeTableRecordsWithPK call |
11,669 | def merge_into_profile_extension ( self , profile_extension , record_data , match_column , insert_on_no_match , update_on_match ) : profile_extension = profile_extension . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) results = self . call ( 'mergeIntoProfileExtension' ... | Responsys . mergeIntoProfileExtension call |
11,670 | def retrieve_table_records ( self , table , query_column , field_list , ids_to_retrieve ) : table = table . get_soap_object ( self . client ) return RecordData . from_soap_type ( self . call ( 'retrieveTableRecords' , table , query_column , field_list , ids_to_retrieve ) ) | Responsys . retrieveTableRecords call |
11,671 | def normalize_docroot ( app , root ) : srcdir = app . env . srcdir default_version = app . config . javalink_default_version if isinstance ( root , basestring ) : ( url , base ) = _parse_docroot_str ( srcdir , root ) return { 'root' : url , 'base' : base , 'version' : default_version } else : normalized = { } normalize... | Creates a package - list URL and a link base from a docroot element . |
11,672 | def assign_valence ( mol ) : for u , v , bond in mol . bonds_iter ( ) : if bond . order == 2 : mol . atom ( u ) . pi = 1 mol . atom ( v ) . pi = 1 if mol . atom ( u ) . symbol == "O" and not mol . atom ( u ) . charge : mol . atom ( v ) . carbonyl_C = 1 if mol . atom ( v ) . symbol == "O" and not mol . atom ( v ) . char... | Assign pi electron and hydrogens |
11,673 | def assign_charge ( mol , force_recalc = False ) : mol . require ( "Aromatic" ) for i , nbrs in mol . neighbors_iter ( ) : atom = mol . atom ( i ) nbrcnt = len ( nbrs ) if atom . symbol == "N" : if not atom . pi : mol . atom ( i ) . charge_phys = 1 elif nbrcnt == 1 and atom . pi == 2 : ni = list ( nbrs . keys ( ) ) [ 0... | Assign charges in physiological condition |
11,674 | def get_type ( type_name ) : parts = type_name . split ( '.' ) if len ( parts ) < 2 : raise SphinxError ( 'Type must be fully-qualified, ' 'of the form ``module.MyClass``. Got: {}' . format ( type_name ) ) module_name = "." . join ( parts [ 0 : - 1 ] ) name = parts [ - 1 ] return getattr ( import_module ( module_name )... | Get a type given its importable name . |
11,675 | def get_task_config_fields ( config_class ) : from lsst . pex . config import Field def is_config_field ( obj ) : return isinstance ( obj , Field ) return _get_alphabetical_members ( config_class , is_config_field ) | Get all configuration Fields from a Config class . |
11,676 | def get_subtask_fields ( config_class ) : from lsst . pex . config import ConfigurableField , RegistryField def is_subtask_field ( obj ) : return isinstance ( obj , ( ConfigurableField , RegistryField ) ) return _get_alphabetical_members ( config_class , is_subtask_field ) | Get all configurable subtask fields from a Config class . |
11,677 | def _get_alphabetical_members ( obj , predicate ) : fields = dict ( inspect . getmembers ( obj , predicate ) ) keys = list ( fields . keys ( ) ) keys . sort ( ) return { k : fields [ k ] for k in keys } | Get members of an object sorted alphabetically . |
11,678 | def typestring ( obj ) : obj_type = type ( obj ) return '.' . join ( ( obj_type . __module__ , obj_type . __name__ ) ) | Make a string for the object s type |
11,679 | def get_docstring ( obj ) : docstring = getdoc ( obj , allow_inherited = True ) if docstring is None : logger = getLogger ( __name__ ) logger . warning ( "Object %s doesn't have a docstring." , obj ) docstring = 'Undocumented' return prepare_docstring ( docstring , ignore = 1 ) | Extract the docstring from an object as individual lines . |
11,680 | def extract_docstring_summary ( docstring ) : summary_lines = [ ] for line in docstring : if line == '' : break else : summary_lines . append ( line ) return ' ' . join ( summary_lines ) | Get the first summary sentence from a docstring . |
11,681 | def run ( self ) : request_cache = cache . get_request_cache ( ) session = client . get_client ( ) . create_session ( ) self . mark_incomplete ( ) universes = [ ] with session . no_autoflush : movies = session . query ( models . Movie ) . all ( ) for movie in movies : article = request_cache . get ( "http://marvel.wiki... | Run loading of movie appearances . |
11,682 | def run ( self ) : self . mark_incomplete ( ) session = client . get_client ( ) . create_session ( ) cpi = ConsumerPriceIndexFile ( ) . load ( ) max_cpi_year = cpi [ 'Year' ] . max ( ) cpi = cpi . set_index ( 'Year' ) [ 'Annual' ] for movie in session . query ( models . Movie ) . all ( ) : if movie . year is not None a... | Compute and store inflation - adjusted movie budgets |
11,683 | def _argsort ( y_score , k = None ) : ranks = y_score . argsort ( ) argsort = ranks [ : : - 1 ] if k is not None : argsort = argsort [ 0 : k ] return argsort | Returns the indexes in descending order of the top k score or all scores if k is None |
11,684 | def count ( y_true , y_score = None , countna = False ) : if not countna : return ( ~ np . isnan ( to_float ( y_true ) ) ) . sum ( ) else : return len ( y_true ) | Counts the number of examples . If countna is False then only count labeled examples i . e . those with y_true not NaN |
11,685 | def count_series ( y_true , y_score , countna = False ) : y_true , y_score = to_float ( y_true , y_score ) top = _argsort ( y_score ) if not countna : a = ( ~ np . isnan ( y_true [ top ] ) ) . cumsum ( ) else : a = range ( 1 , len ( y_true ) + 1 ) return pd . Series ( a , index = range ( 1 , len ( a ) + 1 ) ) | Returns series whose i - th entry is the number of examples in the top i |
11,686 | def baseline ( y_true , y_score = None ) : if len ( y_true ) > 0 : return np . nansum ( y_true ) / count ( y_true , countna = False ) else : return 0.0 | Number of positive labels divided by number of labels or zero if there are no labels |
11,687 | def roc_auc ( y_true , y_score ) : notnull = ~ np . isnan ( y_true ) fpr , tpr , thresholds = sklearn . metrics . roc_curve ( y_true [ notnull ] , y_score [ notnull ] ) return sklearn . metrics . auc ( fpr , tpr ) | Returns area under the ROC curve |
11,688 | def recall_series ( y_true , y_score , k = None , value = True ) : y_true , y_score = to_float ( y_true , y_score ) top = _argsort ( y_score , k ) if not value : y_true = 1 - y_true a = np . nan_to_num ( y_true [ top ] ) . cumsum ( ) return pd . Series ( a , index = np . arange ( 1 , len ( a ) + 1 ) ) | Returns series of length k whose i - th entry is the recall in the top i |
11,689 | def autorotate ( image , orientation = None ) : orientation_value = orientation if orientation else image . _getexif ( ) . get ( EXIF_KEYS . get ( 'Orientation' ) ) if orientation_value is None : raise ImDirectException ( "No orientation available in Exif " "tag or given explicitly." ) if orientation_value in ( 1 , 2 )... | Rotate and return an image according to its Exif information . |
11,690 | def imdirect_open ( fp ) : img = pil_open ( fp , 'r' ) if img . format == 'JPEG' : if isinstance ( fp , string_types ) : exif = piexif . load ( text_type_to_use ( fp ) ) else : fp . seek ( 0 ) exif = piexif . load ( fp . read ( ) ) orientation_value = exif . get ( '0th' , { } ) . get ( piexif . ImageIFD . Orientation )... | Opens identifies the given image file and rotates it if it is a JPEG . |
11,691 | def monkey_patch ( enabled = True ) : if enabled : Image . open = imdirect_open else : Image . open = pil_open | Monkey patching PIL . Image . open method |
11,692 | def save_with_exif_info ( img , * args , ** kwargs ) : if 'exif' in kwargs : exif = kwargs . pop ( 'exif' ) else : exif = img . info . get ( 'exif' ) img . save ( * args , exif = exif , ** kwargs ) | Saves an image using PIL preserving the exif information . |
11,693 | def create ( context , resource , ** kwargs ) : data = utils . sanitize_kwargs ( ** kwargs ) uri = '%s/%s' % ( context . dci_cs_api , resource ) r = context . session . post ( uri , timeout = HTTP_TIMEOUT , json = data ) return r | Create a resource |
11,694 | def get ( context , resource , ** kwargs ) : uri = '%s/%s/%s' % ( context . dci_cs_api , resource , kwargs . pop ( 'id' ) ) r = context . session . get ( uri , timeout = HTTP_TIMEOUT , params = kwargs ) return r | List a specific resource |
11,695 | def get_data ( context , resource , ** kwargs ) : url_suffix = '' if 'keys' in kwargs and kwargs [ 'keys' ] : url_suffix = '/?keys=%s' % ',' . join ( kwargs . pop ( 'keys' ) ) uri = '%s/%s/%s/data%s' % ( context . dci_cs_api , resource , kwargs . pop ( 'id' ) , url_suffix ) r = context . session . get ( uri , timeout =... | Retrieve data field from a resource |
11,696 | def update ( context , resource , ** kwargs ) : etag = kwargs . pop ( 'etag' ) id = kwargs . pop ( 'id' ) data = utils . sanitize_kwargs ( ** kwargs ) uri = '%s/%s/%s' % ( context . dci_cs_api , resource , id ) r = context . session . put ( uri , timeout = HTTP_TIMEOUT , headers = { 'If-match' : etag } , json = data ) ... | Update a specific resource |
11,697 | def delete ( context , resource , id , ** kwargs ) : etag = kwargs . pop ( 'etag' , None ) id = id subresource = kwargs . pop ( 'subresource' , None ) subresource_id = kwargs . pop ( 'subresource_id' , None ) uri = '%s/%s/%s' % ( context . dci_cs_api , resource , id ) if subresource : uri = '%s/%s/%s' % ( uri , subreso... | Delete a specific resource |
11,698 | def purge ( context , resource , ** kwargs ) : uri = '%s/%s/purge' % ( context . dci_cs_api , resource ) if 'force' in kwargs and kwargs [ 'force' ] : r = context . session . post ( uri , timeout = HTTP_TIMEOUT ) else : r = context . session . get ( uri , timeout = HTTP_TIMEOUT ) return r | Purge resource type . |
11,699 | def parse_rst_content ( content , state ) : container_node = nodes . section ( ) container_node . document = state . document viewlist = ViewList ( ) for i , line in enumerate ( content . splitlines ( ) ) : viewlist . append ( line , source = '' , offset = i ) with switch_source_input ( state , viewlist ) : state . nes... | Parse rST - formatted string content into docutils nodes |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.